From 7529680216dd476b9b560c63d20dd387e729df11 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Fri, 14 Aug 2020 13:56:37 +0200 Subject: [PATCH] Additional tests, refactoring and bug fixes (e.g. #2) --- package.json | 6 +- src/error.js | 13 +- src/errorlist.js | 11 +- src/jsonschema.js | 58 +- src/main.js | 4 +- src/node.js | 108 +- src/process.js | 91 +- src/processgraph.js | 114 +- src/registry.js | 19 +- src/utils.js | 104 ++ tests/assets/invalid_args.json | 18 + tests/assets/param_in_obj_arg.json | 18 + tests/assets/processes.json | 1541 +++++++++++++++++----------- tests/assets/undefined_param.json | 13 + tests/jsonschema.test.js | 60 +- tests/node.test.js | 184 +++- tests/processgraph.test.js | 249 ++++- tests/registry.test.js | 35 +- tests/utils.test.js | 104 ++ 19 files changed, 1931 insertions(+), 819 deletions(-) create mode 100644 src/utils.js create mode 100644 tests/assets/invalid_args.json create mode 100644 tests/assets/param_in_obj_arg.json create mode 100644 tests/assets/undefined_param.json create mode 100644 tests/utils.test.js diff --git a/package.json b/package.json index 2016426..54412e9 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "assets/subtype-schemas.json" ], "dependencies": { - "@openeo/js-commons": "^1.1.0", + "@openeo/js-commons": "^1.1.1", "ajv": "^6.12.0" }, "devDependencies": { @@ -44,7 +44,7 @@ "docs": "jsdoc src -r -d docs/ -P package.json -R README.md", "build": "npx webpack", "compat": "jshint src", - "test": "jest --testPathIgnorePatterns assets/ --env=jsdom", - "test_node": "jest --testPathIgnorePatterns assets/ --env=node" + "test": "jest --env=jsdom", + "test_node": "jest --env=node" } } diff --git a/src/error.js b/src/error.js index df0de2d..9cff8a3 100644 --- a/src/error.js +++ b/src/error.js @@ -1,4 +1,4 @@ -const Utils = require('@openeo/js-commons/src/utils.js'); +const Utils = require('./utils'); const MESSAGES = { "MultipleResultNodes": "Multiple result nodes specified for process graph.", @@ -20,7 +20,12 @@ const MESSAGES = { "ProcessMissing": "No process specified" }; -module.exports = class ProcessGraphError extends Error { +/** + * An error class for this library. + * + * @class + */ +class ProcessGraphError extends Error { constructor(codeOrMsg, variables = {}) { super(); @@ -42,4 +47,6 @@ module.exports = class ProcessGraphError extends Error { }; } -}; \ No newline at end of file +} + +module.exports = ProcessGraphError; \ No newline at end of file diff --git a/src/errorlist.js b/src/errorlist.js index 0196d9f..8b8cfbb 100644 --- a/src/errorlist.js +++ b/src/errorlist.js @@ -1,4 +1,9 @@ -module.exports = class ErrorList { +/** + * A list of errors. + * + * @class + */ +class ErrorList { constructor() { this.errors = []; @@ -50,4 +55,6 @@ module.exports = class ErrorList { return this.errors; } -}; \ No newline at end of file +} + +module.exports = ErrorList; \ No newline at end of file diff --git a/src/jsonschema.js b/src/jsonschema.js index c1216d5..63743ef 100644 --- a/src/jsonschema.js +++ b/src/jsonschema.js @@ -1,12 +1,17 @@ const ajv = require('ajv'); -const Utils = require('@openeo/js-commons/src/utils.js'); +const Utils = require('./utils'); const ProcessUtils = require('@openeo/js-commons/src/processUtils.js'); const keywords = require('./keywords'); var geoJsonSchema = require("../assets/GeoJSON.json"); var subtypeSchemas = require("../assets/subtype-schemas.json"); -module.exports = class JsonSchemaValidator { +/** + * JSON Schema Validator. 
+ * + * @class + */ +class JsonSchemaValidator { constructor() { this.ajv = new ajv({ @@ -32,6 +37,7 @@ output: null }; this.epsgCodes = null; + this.processGraph = null; } getFunctionName(subtype) { @@ -173,8 +179,24 @@ async validateWkt2Definition(data) { // To be overridden by end-user application, just doing a very basic check here based on code ported over from proj4js - var codeWords = ['PROJECTEDCRS', 'PROJCRS', 'GEOGCS','GEOCCS','PROJCS','LOCAL_CS', 'GEODCRS', 'GEODETICCRS', 'GEODETICDATUM', 'ENGCRS', 'ENGINEERINGCRS']; - return codeWords.some(word => data.indexOf(word) > -1); + var codeWords = [ + 'BOUNDCRS', + 'COMPOUNDCRS', + 'ENGCRS', 'ENGINEERINGCRS', + 'GEODCRS', 'GEODETICCRS', + 'GEOGCRS', 'GEOGRAPHICCRS', + 'PARAMETRICCRS', + 'PROJCRS', 'PROJECTEDCRS', + 'TIMECRS', + 'VERTCRS', 'VERTICALCRS' + ]; + data = data.toUpperCase(); + if (!codeWords.some(word => data.indexOf(word) !== -1)) { + throw new ajv.ValidationError([{ + message: "Invalid WKT2 string specified." + }]); + } + return true; } async validateTemporalInterval(data) { @@ -203,11 +225,27 @@ return true; } + setProcessGraphParser(processGraph) { + this.processGraph = processGraph; + } + async validateProcessGraph(data) { - const ProcessGraph = require('./processgraph'); // Avoid circular reference - var parser = new ProcessGraph(data); - parser.parse(); - return true; + try { + var parser; + if (this.processGraph) { + parser = this.processGraph.createProcessGraphInstance(data); + } + else { + const ProcessGraph = require('./processgraph'); + parser = new ProcessGraph(data, null, this); + } + parser.parse(); + return true; + } catch (error) { + throw new ajv.ValidationError([{ + message: error.message + }]); + } } // Checks whether the valueSchema is compatible to the paramSchema. @@ -255,4 +293,6 @@ return compatible !== -1; } -}; \ No newline at end of file +} + +module.exports = JsonSchemaValidator; \ No newline at end of file diff --git a/src/main.js b/src/main.js index 4656129..c72015a 100644 --- a/src/main.js +++ b/src/main.js @@ -5,6 +5,7 @@ const ProcessGraph = require('./processgraph'); const ProcessGraphError = require('./error'); const ProcessGraphNode = require('./node'); const ProcessRegistry = require('./registry'); +const Utils = require('./utils'); module.exports = { BaseProcess, @@ -13,5 +14,6 @@ ProcessGraph, ProcessGraphError, ProcessGraphNode, - ProcessRegistry + ProcessRegistry, + Utils }; \ No newline at end of file diff --git a/src/node.js b/src/node.js index bb53b47..3fbbaca 100644 --- a/src/node.js +++ b/src/node.js @@ -1,7 +1,12 @@ const ProcessGraphError = require('./error'); -const Utils = require('@openeo/js-commons/src/utils.js'); +const Utils = require('./utils'); -module.exports = class ProcessGraphNode { +/** + * A process graph node. 
+ * + * @class + */ +class ProcessGraphNode { constructor(node, id, parent = null) { if (typeof id !== 'string' || id.length === 0) { @@ -24,7 +29,7 @@ module.exports = class ProcessGraphNode { this.expectsFrom = []; // From which node do we expect results from this.receivedFrom = []; // From which node have received results from so far this.passesTo = []; - this.computedResult = null; + this.computedResult = undefined; } toJSON() { @@ -57,7 +62,7 @@ module.exports = class ProcessGraphNode { } getArgumentType(name) { - return ProcessGraphNode.getType(this.arguments[name]); + return Utils.getType(this.arguments[name]); } getRawArgument(name) { @@ -66,7 +71,7 @@ module.exports = class ProcessGraphNode { getRawArgumentValue(name) { var arg = this.getRawArgument(name); - switch(ProcessGraphNode.getType(arg)) { + switch(Utils.getType(arg)) { case 'result': return arg.from_node; case 'callback': @@ -86,32 +91,11 @@ module.exports = class ProcessGraphNode { } getArgumentRefs(name) { - return ProcessGraphNode.getValueRefs(this.arguments[name]); + return Utils.getRefs(this.arguments[name], false); } getRefs() { - return ProcessGraphNode.getValueRefs(this.arguments); - } - - static getValueRefs(value) { - var store = []; - var type = ProcessGraphNode.getType(value); - switch(type) { - case 'result': - case 'parameter': - store.push(value); - break; - case 'callback': - // ToDo - break; - case 'array': - case 'object': - for(var i in value) { - store = store.concat(ProcessGraphNode.getValueRefs(value[i])); - } - break; - } - return store; + return Utils.getRefs(this.arguments, false); } getProcessGraphParameter(name) { @@ -135,15 +119,17 @@ module.exports = class ProcessGraphNode { return defaultValue; } - throw new ProcessGraphError('ProcessGraphParameterMissing', { - argument: name, - node_id: this.id, - process_id: this.process_id - }); + if (!this.processGraph.allowUndefinedParameterRefs) { + throw new ProcessGraphError('ProcessGraphParameterMissing', { + argument: name, + node_id: this.id, + process_id: this.process_id + }); + } } evaluateArgument(arg) { - var type = ProcessGraphNode.getType(arg); + var type = Utils.getType(arg); switch(type) { case 'result': return this.processGraph.getNode(arg.from_node).getResult(); @@ -153,65 +139,51 @@ module.exports = class ProcessGraphNode { return this.getProcessGraphParameter(arg.from_parameter); case 'array': case 'object': + let copy = type === 'array' ? 
[] : {}; for(var i in arg) { - arg[i] = this.evaluateArgument(arg[i]); + copy[i] = this.evaluateArgument(arg[i]); } - return arg; + return copy; default: return arg; } } - static getType(obj, reportNullAs = 'null') { - const ProcessGraph = require('./processgraph'); - if (typeof obj === 'object') { - if (obj === null) { - return reportNullAs; - } - else if (Array.isArray(obj)) { - return 'array'; - } - else if(obj.hasOwnProperty("process_graph") || obj instanceof ProcessGraph) { - return 'callback'; - } - else if(obj.hasOwnProperty("from_node")) { - return 'result'; - } - else if(obj.hasOwnProperty("from_parameter")) { - return 'parameter'; - } - else { - return 'object'; - } - } - return (typeof obj); - } - isStartNode() { return (this.expectsFrom.length === 0); } addPreviousNode(node) { - this.expectsFrom.push(node); + if (!this.expectsFrom.find(other => other.id === node.id)) { + this.expectsFrom.push(node); + } } getPreviousNodes() { - return this.expectsFrom; + // Sort nodes to ensure a consistent execution order + return this.expectsFrom.sort((a,b) => a.id.localeCompare(b.id)); } addNextNode(node) { - this.passesTo.push(node); + if (!this.passesTo.find(other => other.id === node.id)) { + this.passesTo.push(node); + } } getNextNodes() { - return this.passesTo; + // Sort nodes to ensure a consistent execution order + return this.passesTo.sort((a,b) => a.id.localeCompare(b.id)); } reset() { - this.computedResult = null; + this.computedResult = undefined; this.receivedFrom = []; } + getDescription() { + return this.description; + } + setDescription(description) { if (typeof description === 'string') { this.description = description; @@ -236,4 +208,6 @@ module.exports = class ProcessGraphNode { return (this.expectsFrom.length === this.receivedFrom.length); // all dependencies solved? 
} -}; \ No newline at end of file +} + +module.exports = ProcessGraphNode; \ No newline at end of file diff --git a/src/process.js b/src/process.js index 13d69fa..d050553 100644 --- a/src/process.js +++ b/src/process.js @@ -1,9 +1,15 @@ const JsonSchemaValidator = require('./jsonschema'); const ProcessGraphError = require('./error'); -const ProcessGraphNode = require('./node'); const ProcessGraph = require('./processgraph'); +const ProcessUtils = require('@openeo/js-commons/src/processUtils'); +const Utils = require('./utils'); -module.exports = class BaseProcess { +/** + * Base Process class + * + * @class + */ +class BaseProcess { constructor(spec) { this.spec = spec; // Keep original specification data @@ -34,6 +40,11 @@ module.exports = class BaseProcess { if (await this.validateArgument(arg, node, param.name, param)) { continue; } + throw new ProcessGraphError('ProcessArgumentInvalid', { + process: this.id, + argument: param.name, + reason: "Can't validate argument" + }); } } @@ -42,7 +53,7 @@ module.exports = class BaseProcess { await arg.validate(true); return true; } - let argType = ProcessGraphNode.getType(arg); + let argType = Utils.getType(arg); switch(argType) { // Check whether parameter is required case 'undefined': @@ -57,26 +68,62 @@ module.exports = class BaseProcess { case 'parameter': var cbParam = node.getProcessGraph().getCallbackParameter(arg.from_parameter); if (cbParam) { - return JsonSchemaValidator.isSchemaCompatible(param.schema, cbParam); + if (!JsonSchemaValidator.isSchemaCompatible(param.schema, cbParam)) { + throw new ProcessGraphError('ProcessArgumentInvalid', { + process: this.id, + argument: parameterName, + reason: "Schema for parameter '" + arg.from_parameter + "' not compatible" + }); + } + else { + return true; // Parameter not available, nothing to validate against + } } - // Parameter not set, nothing to validate against - return true; + else { + node.getProcessGraphParameter(arg.from_parameter); + return true; + } // jshint ignore:line case 'result': - try { - var pg = node.getProcessGraph(); - var process_id = pg.getNode(arg.from_node).process_id; - var process = pg.getProcess(process_id); - return JsonSchemaValidator.isSchemaCompatible(param.schema, process.returns.schema); - } catch (e) {} - break; + var pg = node.getProcessGraph(); + var resultNode = pg.getNode(arg.from_node); + var process = pg.getProcess(resultNode); + if (JsonSchemaValidator.isSchemaCompatible(param.schema, process.returns.schema)) { + return true; + } + throw new ProcessGraphError('ProcessArgumentInvalid', { + process: this.id, + argument: parameterName, + reason: "Schema for result '" + arg.from_node + "' not compatible" + }); case 'array': case 'object': - // ToDo: Check how we can validate arrays and objects that have references to callback arguments and node results in them... 
- // See issue https://github.com/Open-EO/openeo-js-processgraphs/issues/2 -// for(var i in arg) { -// await this.validateArgument(arg[i], node, parameterName, param); -// } - return true; + if (Utils.containsRef(arg)) { + // This tries to at least be compliant to one of the element schemas + // It's better than validating nothing, but it's still not 100% correct + let schemas = ProcessUtils.normalizeJsonSchema(param.schema); + for(var key in arg) { + let elementSchema = schemas.map(schema => ProcessUtils.getElementJsonSchema(schema, key)).filter(schema => Object.keys(schema).length); // jshint ignore:line + if (elementSchema.length > 0) { + let validated = 0; + let lastError = null; + for(let schema of elementSchema) { + try { + await this.validateArgument(arg[key], node, parameterName + "." + key, {schema}); + validated++; + } catch (error) { + lastError = error; + } + } + if (validated === 0 && lastError) { + throw lastError; + } + } + } + return true; + } + else { + // Use default behavior below, so no break; needed + } // jshint ignore:line default: let validator = node.getProcessGraph().getJsonSchemaValidator(); // Validate against JSON schema @@ -90,7 +137,7 @@ module.exports = class BaseProcess { } } - return false; + return true; } /* istanbul ignore next */ @@ -104,4 +151,6 @@ module.exports = class BaseProcess { throw "test not implemented yet"; } -}; \ No newline at end of file +} + +module.exports = BaseProcess; \ No newline at end of file diff --git a/src/processgraph.js b/src/processgraph.js index a51bedd..e02ea41 100644 --- a/src/processgraph.js +++ b/src/processgraph.js @@ -2,23 +2,22 @@ const ErrorList = require('./errorlist'); const JsonSchemaValidator = require('./jsonschema'); const ProcessGraphError = require('./error'); const ProcessGraphNode = require('./node'); -const Utils = require('@openeo/js-commons/src/utils.js'); +const Utils = require('./utils'); const ProcessUtils = require('@openeo/js-commons/src/processUtils.js'); -module.exports = class ProcessGraph { +/** + * Process parser, validator and executor. + * + * @class + */ +class ProcessGraph { // ToDo: Also parse and validate other parts of the process, e.g. id, parameters, etc. constructor(process, processRegistry = null, jsonSchemaValidator = null) { - this.process = Utils.isObject(process) ? 
Utils.deepClone(process) : {}; - if (!Utils.isObject(this.process.process_graph)) { - this.process.process_graph = {}; - } - this.processRegistry = processRegistry; - this.jsonSchemaValidator = jsonSchemaValidator; - this.allowEmptyGraph = false; + this.process = process; this.nodes = {}; - this.startNodes = {}; + this.startNodes = []; this.resultNode = null; this.children = []; this.parentNode = null; @@ -27,6 +26,12 @@ module.exports = class ProcessGraph { this.validated = false; this.errors = new ErrorList(); this.arguments = {}; + // Sub process graphs need to copy these: + this.processRegistry = processRegistry; + this.jsonSchemaValidator = jsonSchemaValidator; + this.allowEmptyGraph = false; + this.fillParameters = false; + this.allowUndefinedParameterRefs = true; } toJSON() { @@ -37,6 +42,7 @@ module.exports = class ProcessGraph { if (this.jsonSchemaValidator === null) { this.jsonSchemaValidator = this.createJsonSchemaValidatorInstance(); } + this.jsonSchemaValidator.setProcessGraphParser(this); return this.jsonSchemaValidator; } @@ -49,7 +55,15 @@ module.exports = class ProcessGraph { } createProcessGraphInstance(process) { - return new ProcessGraph(process, this.processRegistry, this.getJsonSchemaValidator()); + let pg = new ProcessGraph(process, this.processRegistry, this.getJsonSchemaValidator()); + return this.copyProcessGraphInstanceProperties(pg); + } + + copyProcessGraphInstanceProperties(pg) { + pg.allowEmptyGraph = this.allowEmptyGraph; + pg.fillParameters = this.fillParameters; + pg.allowUndefinedParameterRefs = this.allowUndefinedParameterRefs; + return pg; } getParentNode() { @@ -81,6 +95,17 @@ module.exports = class ProcessGraph { this.errors.add(error); } + allowUndefinedParameters(allow = true) { + this.allowUndefinedParameterRefs = allow; + } + + fillUndefinedParameters(fill = true) { + if (fill) { + this.allowUndefinedParameterRefs = true; + } + this.fillParameters = fill; + } + allowEmpty(allow = true) { this.allowEmptyGraph = allow; } @@ -124,7 +149,6 @@ module.exports = class ProcessGraph { for(let id in this.nodes) { var node = this.nodes[id]; - if (node.isResultNode) { if (this.resultNode !== null) { throw makeError('MultipleResultNodes'); @@ -134,19 +158,28 @@ module.exports = class ProcessGraph { this.parseNodeArguments(id, node); } - if (!this.findStartNodes()) { - throw makeError('StartNodeMissing'); - } - else if (this.resultNode === null) { + if (this.resultNode === null) { throw makeError('ResultNodeMissing'); } + // Find/Cache start nodes, only possible after parseNodeArguments have been called for all nodes + // Sort nodes to ensure a consistent execution order + this.startNodes = Object.values(this.nodes).filter(node => node.isStartNode()).sort((a,b) => a.id.localeCompare(b.id)); + if (this.startNodes.length === 0) { + throw makeError('StartNodeMissing'); + } + this.parsed = true; } async validate(throwOnErrors = true) { if (this.validated) { - return null; + if (throwOnErrors && this.errors.count() > 0) { + throw this.errors.first(); + } + else { + return this.errors; + } } this.validated = true; @@ -167,6 +200,7 @@ module.exports = class ProcessGraph { } async execute(parameters = null) { + this.allowUndefinedParameters(false); await this.validate(); this.reset(); this.setArguments(parameters); @@ -210,7 +244,9 @@ module.exports = class ProcessGraph { async validateNode(node) { var process = this.getProcess(node); - return await process.validate(node); + if (process) { + return await process.validate(node); + } } async executeNodes(nodes, 
previousNode = null) { @@ -246,7 +282,7 @@ module.exports = class ProcessGraph { } for(var argumentName in args) { var arg = args[argumentName]; - var type = ProcessGraphNode.getType(arg); + var type = Utils.getType(arg); switch(type) { case 'result': this.connectNodes(node, arg.from_node); @@ -255,7 +291,7 @@ module.exports = class ProcessGraph { args[argumentName] = this.createProcessGraph(arg, node, argumentName); break; case 'parameter': - if (!this.hasParameter(arg.from_parameter) && !this.getCallbackParameter(arg.from_parameter)) { + if (this.fillParameters && !this.hasParameter(arg.from_parameter) && !this.getCallbackParameter(arg.from_parameter)) { this.addParameter(arg.from_parameter); } break; @@ -331,18 +367,6 @@ module.exports = class ProcessGraph { prevNode.addNextNode(node); } - findStartNodes() { - var found = false; - for(var id in this.nodes) { - var node = this.nodes[id]; - if (node.isStartNode()) { - this.startNodes[id] = node; - found = true; - } - } - return found; - } - reset() { for(var id in this.nodes) { this.nodes[id].reset(); @@ -355,15 +379,15 @@ module.exports = class ProcessGraph { } getStartNodes() { - return Object.values(this.startNodes); + return this.startNodes; } getStartNodeIds() { - return Object.keys(this.startNodes); + return this.startNodes.map(node => node.id); } getNode(nodeId) { - return this.nodes[nodeId]; + return nodeId in this.nodes ? this.nodes[nodeId] : null; } getNodeCount() { @@ -378,13 +402,23 @@ module.exports = class ProcessGraph { return this.errors; } - getProcess(node) { + /** + * Gets the process for the given process ID or node. + * + * @param {ProcessGraphNode|string} id + * @returns {object|null} + * @throws {ProcessGraphError} - ProcessUnsupported + */ + getProcess(id) { if (this.processRegistry === null) { return null; } - var process = this.processRegistry.get(node.process_id); + if (id instanceof ProcessGraphNode) { + id = id.process_id; + } + var process = this.processRegistry.get(id); if (process === null) { - throw new ProcessGraphError('ProcessUnsupported', {process: node.process_id}); + throw new ProcessGraphError('ProcessUnsupported', {process: id}); } return process; } @@ -412,4 +446,6 @@ module.exports = class ProcessGraph { return ProcessUtils.getCallbackParametersForProcess(this.getParentProcess(), this.parentParameterName); } -}; \ No newline at end of file +} + +module.exports = ProcessGraph; \ No newline at end of file diff --git a/src/registry.js b/src/registry.js index 26fd8de..f2414c5 100644 --- a/src/registry.js +++ b/src/registry.js @@ -1,7 +1,12 @@ const Process = require('./process'); -const Utils = require('@openeo/js-commons/src/utils.js'); +const Utils = require('./utils'); -module.exports = class ProcessRegistry { +/** + * Central registry for processes. + * + * @class + */ +class ProcessRegistry { constructor(processes = []) { // Keys added to this object must be lowercase! 
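[Reviewer note, not part of the patch] The reworked ProcessGraph lifecycle above (parse/validate/execute plus the new allowUndefinedParameters/fillUndefinedParameters switches) can be exercised roughly as follows. This is a minimal sketch only: the asset path and the shape of the `process` argument are assumptions based on the tests added in this PR, and `validate(false)` returning the ErrorList mirrors the behavior shown in the diff.

const { ProcessGraph, ProcessRegistry } = require('@openeo/js-processgraphs');
const specs = require('./tests/assets/processes.json'); // process specs asset from this PR (assumed path)
const registry = new ProcessRegistry(specs);

async function isValid(process) { // process: an object with a `process_graph` property
	const pg = new ProcessGraph(process, registry);
	pg.fillUndefinedParameters(); // new in this patch: register undeclared from_parameter refs instead of failing
	const errors = await pg.validate(false); // collect errors instead of throwing on the first one
	return errors.count() === 0;
}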
@@ -19,7 +24,9 @@ if (!Utils.isObject(process)) { throw new Error("Invalid process; not an object."); } - if (typeof process.toJSON === 'function') { + + let isImpl = process instanceof Process; + if (!isImpl && typeof process.toJSON === 'function') { var json = process.toJSON(); if (Utils.isObject(json)) { process = json; @@ -28,7 +35,7 @@ if (typeof process.id !== 'string') { throw new Error("Invalid process; no id specified."); } - this.processes[process.id.toLowerCase()] = new Process(process); + this.processes[process.id.toLowerCase()] = isImpl ? process : new Process(process); } count() { @@ -53,4 +60,6 @@ return Object.values(this.processes).map(impl => impl.toJSON()); } -}; \ No newline at end of file +} + +module.exports = ProcessRegistry; \ No newline at end of file diff --git a/src/utils.js b/src/utils.js new file mode 100644 index 0000000..d5555b4 --- /dev/null +++ b/src/utils.js @@ -0,0 +1,104 @@ +const CommonUtils = require('@openeo/js-commons/src/utils.js'); + +/** + * Utilities + * + * @class + */ +class Utils extends CommonUtils { + + /** + * Checks whether a value contains references (from_parameter, from_node). + * + * @param {*} value + * @param {boolean} checkCallbacks - Set to `true` to also check for refs in callbacks. + * @returns {boolean} + */ + static containsRef(value, checkCallbacks = false, depth = 0) { + let type = Utils.getType(value); + switch(type) { + case 'result': + case 'parameter': + return true; + case 'callback': + if ((depth === 0 || checkCallbacks) && Utils.containsRef(value.process_graph, checkCallbacks, depth+1)) { + return true; + } + break; + case 'array': + case 'object': + for(let key in value) { + if (Utils.containsRef(value[key], checkCallbacks, depth)) { + return true; + } + } + } + return false; + } + + /** + * Returns all distinct references (from_parameter, from_node) contained in a value. + * + * @param {*} value + * @param {boolean} getFromCallbacks - Set to `true` to also include refs in callbacks. + * @returns {object[]} + */ + static getRefs(value, getFromCallbacks = false, depth = 0) { + var store = []; + var type = Utils.getType(value); + switch(type) { + case 'result': + case 'parameter': + store.push(value); + break; + case 'callback': + if (depth === 0 || getFromCallbacks) { + store = store.concat(Utils.getRefs(value.process_graph, getFromCallbacks, depth+1)); + } + break; + case 'array': + case 'object': + for(var key in value) { + store = store.concat(Utils.getRefs(value[key], getFromCallbacks, depth)); + } + break; + } + return Utils.unique(store, true); + } + + /** + * Returns the type of the value. + * + * Similar to typeof, but gives more details for objects (array, parameter, callback, result, null, object). 
+ * + * @param {*} value + * @returns {string} + */ + static getType(value) { + const ProcessGraph = require('./processgraph'); + if (typeof value === 'object') { + if (value === null) { + return 'null'; + } + else if (Array.isArray(value)) { + return 'array'; + } + else if(value.hasOwnProperty("process_graph") || value instanceof ProcessGraph) { + return 'callback'; + } + else if(value.hasOwnProperty("from_node")) { + return 'result'; + } + else if(value.hasOwnProperty("from_parameter")) { + return 'parameter'; + } + else { + return 'object'; + } + } + return (typeof value); + } + +} + +module.exports = Utils; \ No newline at end of file diff --git a/tests/assets/invalid_args.json b/tests/assets/invalid_args.json new file mode 100644 index 0000000..ac7a973 --- /dev/null +++ b/tests/assets/invalid_args.json @@ -0,0 +1,18 @@ +{ + "process_graph":{ + "dc":{ + "process_id":"load_collection", + "arguments":{ + "id":"S2", + "spatial_extent":{ + "west":"16.1", + "east":16.6, + "north":48.6, + "south":47.2 + }, + "temporal_extent":null + }, + "result": true + } + } +} \ No newline at end of file diff --git a/tests/assets/param_in_obj_arg.json b/tests/assets/param_in_obj_arg.json new file mode 100644 index 0000000..e5fe57b --- /dev/null +++ b/tests/assets/param_in_obj_arg.json @@ -0,0 +1,18 @@ +{ + "process_graph":{ + "dc":{ + "process_id":"load_collection", + "arguments":{ + "id":"S2", + "spatial_extent":{ + "west":{"from_parameter": "west"}, + "east":{"from_parameter": "east"}, + "north":{"from_parameter": "north"}, + "south":{"from_parameter": "south"} + }, + "temporal_extent":null + }, + "result": true + } + } +} \ No newline at end of file diff --git a/tests/assets/processes.json b/tests/assets/processes.json index 8e0d31b..891669b 100644 --- a/tests/assets/processes.json +++ b/tests/assets/processes.json @@ -149,7 +149,7 @@ } ], "returns": { - "description": "The data cube with a newly added dimension.", + "description": "The data cube with a newly added dimension. The new dimension has exactly one dimension label. All other dimensions remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -344,7 +344,7 @@ }, "links": [ { - "href": "https://open-eo.github.io/openeo-api/glossary/#aggregation-and-resampling", + "href": "https://openeo.org/documentation/1.0/glossary.html#aggregate-reducing-resolution", "rel": "about", "title": "Aggregation explained in the openEO glossary" }, @@ -447,7 +447,7 @@ }, "links": [ { - "href": "https://open-eo.github.io/openeo-api/glossary/#aggregation-and-resampling", + "href": "https://openeo.org/documentation/1.0/glossary.html#aggregate-reducing-resolution", "rel": "about", "title": "Aggregation explained in the openEO glossary" }, @@ -458,10 +458,125 @@ } ] }, + { + "id": "aggregate_temporal_period", + "summary": "Temporal aggregations based on calendar hierarchies", + "description": "Computes a temporal aggregation based on calendar hierarchies such as years, months or seasons. For other calendar hierarchies ``aggregate_temporal()`` can be used.\n\nFor each interval, all data along the dimension will be passed through the reducer.\n\nIf the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension.", + "categories": [ + "aggregate & resample", + "climatology", + "cubes" + ], + "parameters": [ + { + "name": "data", + "description": "A data cube.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "period", + "description": "The time intervals to aggregate. 
The following pre-defined values are available:\n\n* `hour`: Hour of the day\n* `day`: Day of the year\n* `week`: Week of the year\n* `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year.\n* `month`: Month of the year\n* `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November).\n* `tropical-season`: Six month periods of the tropical seasons (November - April, May - October).\n* `year`: Proleptic years\n* `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9.\n* `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0.", + "schema": { + "type": "string", + "enum": [ + "hour", + "day", + "week", + "dekad", + "month", + "season", + "tropical-season", + "year", + "decade", + "decade-ad" + ] + } + }, + { + "name": "reducer", + "description": "A reducer to be applied on all values along the specified dimension. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes.", + "schema": { + "type": "object", + "subtype": "process-graph", + "parameters": [ + { + "name": "data", + "description": "A labeled array with elements of any type.", + "schema": { + "type": "array", + "subtype": "labeled-array", + "items": { + "description": "Any data type." + } + } + }, + { + "name": "context", + "description": "Additional data passed by the user.", + "schema": { + "description": "Any data type." + }, + "optional": true, + "default": null + } + ] + } + }, + { + "name": "dimension", + "description": "The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", + "schema": { + "type": [ + "string", + "null" + ] + }, + "optional": true, + "default": null + }, + { + "name": "context", + "description": "Additional data to be passed to the reducer.", + "schema": { + "description": "Any data type." + }, + "optional": true, + "default": null + } + ], + "returns": { + "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. 
The specified temporal dimension has the following dimension labels (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month):\n\n* `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23`\n* `day`: `YYYY-001` - `YYYY-365`\n* `week`: `YYYY-01` - `YYYY-52`\n* `dekad`: `YYYY-00` - `YYYY-36`\n* `month`: `YYYY-01` - `YYYY-12`\n* `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November).\n* `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October).\n* `year`: `YYYY`\n* `decade`: `YYY0`\n* `decade-ad`: `YYY1`", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + "exceptions": { + "TooManyDimensions": { + "message": "The data cube contains multiple temporal dimensions. The parameter `dimension` must be specified." + }, + "DimensionNotAvailable": { + "message": "A dimension with the specified name does not exist." + }, + "DistinctDimensionLabelsRequired": { + "message": "The dimension labels have duplicate values. Distinct labels must be specified." + } + }, + "links": [ + { + "href": "https://openeo.org/documentation/1.0/glossary.html#aggregate-reducing-resolution", + "rel": "about", + "title": "Aggregation explained in the openEO glossary" + } + ] + }, { "id": "aggregate_temporal", "summary": "Temporal aggregations", - "description": "Computes a temporal aggregation based on an array of temporal intervals.\n\nCalendar hierarchies such as year, month, week etc. must be transformed into specific intervals by the clients. For each interval, all data along the dimension will be passed through the reducer.\n\nThe computed values will be projected to the labels. If no labels are specified, the start of the temporal interval will be used as label for the corresponding values. In case of a conflict (i.e. the user-specified values for the start times of the temporal intervals are not distinct), the user-defined labels must be specified in the parameter `labels` as otherwise a `DistinctDimensionLabelsRequired` error would be thrown. The number of user-defined labels and the number of intervals need to be equal.\n\nIf the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension.", + "description": "Computes a temporal aggregation based on an array of temporal intervals.\n\nFor common regular calendar hierarchies such as year, month, week or seasons ``aggregate_temporal_period()`` can be used. Other calendar hierarchies must be transformed into specific intervals by the clients.\n\nFor each interval, all data along the dimension will be passed through the reducer.\n\nThe computed values will be projected to the labels. If no labels are specified, the start of the temporal interval will be used as label for the corresponding values. In case of a conflict (i.e. the user-specified values for the start times of the temporal intervals are not distinct), the user-defined labels must be specified in the parameter `labels` as otherwise a `DistinctDimensionLabelsRequired` error would be thrown. 
The number of user-defined labels and the number of intervals need to be equal.\n\nIf the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension.", "categories": [ "cubes", "aggregate & resample" @@ -504,6 +619,13 @@ "format": "time", "subtype": "time" }, + { + "type": "string", + "subtype": "year", + "minLength": 4, + "maxLength": 4, + "pattern": "^\\d{4}$" + }, { "type": "null" } @@ -589,7 +711,7 @@ }, { "name": "dimension", - "description": "The name of the temporal dimension for aggregation. All data along the dimension will be passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Note:** The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.", + "description": "The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": [ "string", @@ -610,7 +732,7 @@ } ], "returns": { - "description": "A data cube with potentially lower resolution and potentially lower cardinality, but the same number of dimensions as the original data cube.", + "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension.", "schema": { "type": "object", "subtype": "raster-cube" @@ -618,72 +740,58 @@ }, "examples": [ { - "process_graph": { - "loadco1": { - "process_id": "load_collection", - "arguments": { - "id": "Sentinel-2", - "spatial_extent": { - "from_parameter": "spatial_extent" - }, - "temporal_extent": [ - "2015-01-01", - "2019-01-01" - ] - } + "arguments": { + "data": { + "from_parameter": "data" }, - "aggreg1": { - "process_id": "aggregate_temporal", - "arguments": { - "data": { - "from_node": "loadco1" - }, - "intervals": [ - [ - "2015-01-01", - "2016-01-01" - ], - [ - "2016-01-01", - "2017-01-01" - ], - [ - "2017-01-01", - "2018-01-01" - ], - [ - "2018-01-01", - "2019-01-01" - ] - ], - "labels": [ - "2015", - "2016", - "2017", - "2018" - ], - "reducer": { - "process_graph": { - "median1": { - "process_id": "mean", - "arguments": { - "data": { - "from_parameter": "data" - } - }, - "result": true + "intervals": [ + [ + "2015-01-01", + "2016-01-01" + ], + [ + "2016-01-01", + "2017-01-01" + ], + [ + "2017-01-01", + "2018-01-01" + ], + [ + "2018-01-01", + "2019-01-01" + ], + [ + "2019-01-01", + "2020-01-01" + ] + ], + "labels": [ + "2015", + "2016", + "2017", + "2018", + "2019" + ], + "reducer": { + "process_graph": { + "mean1": { + "process_id": "mean", + "arguments": { + "data": { + "from_parameter": "data" } - } + }, + "result": true } - }, - "result": true + } } } } ], "exceptions": { "TooManyDimensions": { - "message": "The number of dimensions must be reduced to three for 'aggregate_temporal'." + "message": "The data cube contains multiple temporal dimensions. The parameter `dimension` must be specified." 
}, "DimensionNotAvailable": { "message": "A dimension with the specified name does not exist." @@ -694,7 +802,7 @@ }, "links": [ { - "href": "https://open-eo.github.io/openeo-api/glossary/#aggregation-and-resampling", + "href": "https://openeo.org/documentation/1.0/glossary.html#aggregate-reducing-resolution", "rel": "about", "title": "Aggregation explained in the openEO glossary" } @@ -703,14 +811,14 @@ { "id": "all", "summary": "Are all of the values true?", - "description": "Checks if **all** of the values are true. Evaluates all values from the first to the last element and stops once the outcome is unambiguous.\n\nIf only one value is given, the process evaluates to the given value. If no value is given (i.e. the array is empty) the process returns `null`.\n\nBy default all no-data values are ignored so that the process returns `null` if all values are no-data, `true` if all other values are true and `false` otherwise. Setting the `ignore_nodata` flag to `false` considers no-data values so that `null` is a valid logical object. If a component is `null`, the result will be `null` if the outcome is ambiguous. See the following truth table:\n\n```\n || null | false | true\n----- || ----- | ----- | -----\nnull || null | false | null\nfalse || false | false | false\ntrue || null | false | true\n```", + "description": "Checks if **all** of the values in `data` are true. Evaluates all values from the first to the last element and stops once the outcome is unambiguous.\n\nIf only one value is given, the process evaluates to the given value. If no value is given (i.e. the array is empty) the process returns `null`.\n\nBy default all no-data values are ignored so that the process returns `null` if all values are no-data, `true` if all other values are true and `false` otherwise. Setting the `ignore_nodata` flag to `false` considers no-data values so that `null` is a valid logical object. If a component is `null`, the result will be `null` if the outcome is ambiguous. See the following truth table:\n\n```\n || null | false | true\n----- || ----- | ----- | -----\nnull || null | false | null\nfalse || false | false | false\ntrue || null | false | true\n```", "categories": [ "logic", "reducer" ], "parameters": [ { - "name": "values", + "name": "data", "description": "A set of boolean values.", "schema": { "type": "array", @@ -744,7 +852,7 @@ "examples": [ { "arguments": { - "values": [ + "data": [ false, null ] @@ -753,7 +861,7 @@ }, { "arguments": { - "values": [ + "data": [ true, null ] @@ -762,7 +870,7 @@ }, { "arguments": { - "values": [ + "data": [ false, null ], @@ -772,7 +880,7 @@ }, { "arguments": { - "values": [ + "data": [ true, null ], @@ -782,7 +890,7 @@ }, { "arguments": { - "values": [ + "data": [ true, false, true, @@ -793,7 +901,7 @@ }, { "arguments": { - "values": [ + "data": [ true, false ] @@ -802,7 +910,7 @@ }, { "arguments": { - "values": [ + "data": [ true, true ] @@ -811,7 +919,7 @@ }, { "arguments": { - "values": [ + "data": [ true ] }, @@ -819,7 +927,7 @@ }, { "arguments": { - "values": [ + "data": [ null ], "ignore_nodata": false @@ -828,7 +936,7 @@ }, { "arguments": { - "values": [] + "data": [] }, "returns": null } @@ -927,17 +1035,72 @@ } } }, + { + "id": "anomaly", + "summary": "Computes anomalies", + "description": "Computes anomalies based on normals for temporal periods. 
It compares the data for each label in the temporal dimension with the corresponding data in the normals data cube by subtracting the normal from the data.", + "categories": [ + "climatology", + "math" + ], + "parameters": [ + { + "name": "data", + "description": "A data cube with exactly one temporal dimension and the following dimension labels for the given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month):\n\n* `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23`\n* `day`: `YYYY-001` - `YYYY-365`\n* `week`: `YYYY-01` - `YYYY-52`\n* `dekad`: `YYYY-00` - `YYYY-36`\n* `month`: `YYYY-01` - `YYYY-12`\n* `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November).\n* `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October).\n* `year`: `YYYY`\n* `decade`: `YYY0`\n* `decade-ad`: `YYY1`\n* `single-period` / `climatology-period`: Any\n\n``aggregate_temporal_period()`` can compute such a data cube.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "normals", + "description": "A data cube with normals, e.g. daily, monthly or yearly values computed from a process such as ``climatological_normal()``. Must contain exactly one temporal dimension with the following dimension labels for the given period:\n\n* `hour`: `00` - `23`\n* `day`: `001` - `365`\n* `week`: `01` - `52`\n* `dekad`: `00` - `36`\n* `month`: `01` - `12`\n* `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November)\n* `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October)\n* `year`: Four-digit year numbers\n* `decade`: Four-digit year numbers, the last digit being a `0`\n* `decade-ad`: Four-digit year numbers, the last digit being a `1`\n* `single-period` / `climatology-period`: A single dimension label with any name is expected.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "period", + "description": "Specifies the time intervals available in the normals data cube. The following options are available:\n\n* `hour`: Hour of the day\n* `day`: Day of the year\n* `week`: Week of the year\n* `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year.\n* `month`: Month of the year\n* `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November).\n* `tropical-season`: Six month periods of the tropical seasons (November - April, May - October).\n* `year`: Proleptic years\n* `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9.\n* `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0.\n* `single-period` / `climatology-period`: A single period of arbitrary length", + "schema": { + "type": "string", + "enum": [ + "hour", + "day", + "week", + "dekad", + "month", + "season", + "tropical-season", + "year", + "decade", + "decade-ad", + "climatology-period", + "single-period" + ] + } + } + ], + "returns": { + "description": "A data cube with the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + } + }, { "id": "any", "summary": "Is at least one value true?", - "description": "Checks if **any** (i.e. at least one) value is `true`. Evaluates all values from the first to the last element and stops once the outcome is unambiguous.\n\nIf only one value is given, the process evaluates to the given value. If no value is given (i.e. the array is empty) the process returns `null`.\n\nBy default all no-data values are ignored so that the process returns `null` if all values are no-data, `true` if at least one of the other values is true and `false` otherwise. Setting the `ignore_nodata` flag to `false` considers no-data values so that `null` is a valid logical object. If a component is `null`, the result will be `null` if the outcome is ambiguous. See the following truth table:\n\n```\n || null | false | true\n----- || ---- | ----- | ----\nnull || null | null | true\nfalse || null | false | true\ntrue || true | true | true\n```", + "description": "Checks if **any** (i.e. at least one) value in `data` is `true`. Evaluates all values from the first to the last element and stops once the outcome is unambiguous.\n\nIf only one value is given, the process evaluates to the given value. If no value is given (i.e. the array is empty) the process returns `null`.\n\nBy default all no-data values are ignored so that the process returns `null` if all values are no-data, `true` if at least one of the other values is true and `false` otherwise. Setting the `ignore_nodata` flag to `false` considers no-data values so that `null` is a valid logical object. If a component is `null`, the result will be `null` if the outcome is ambiguous. See the following truth table:\n\n```\n || null | false | true\n----- || ---- | ----- | ----\nnull || null | null | true\nfalse || null | false | true\ntrue || true | true | true\n```", "categories": [ "logic", "reducer" ], "parameters": [ { - "name": "values", + "name": "data", "description": "A set of boolean values.", "schema": { "type": "array", @@ -971,7 +1134,7 @@ "examples": [ { "arguments": { - "values": [ + "data": [ false, null ] @@ -980,7 +1143,7 @@ }, { "arguments": { - "values": [ + "data": [ true, null ] @@ -989,7 +1152,7 @@ }, { "arguments": { - "values": [ + "data": [ false, null ], @@ -999,7 +1162,7 @@ }, { "arguments": { - "values": [ + "data": [ true, null ], @@ -1009,7 +1172,7 @@ }, { "arguments": { - "values": [ + "data": [ true, false, true, @@ -1020,7 +1183,7 @@ }, { "arguments": { - "values": [ + "data": [ true, false ] @@ -1029,7 +1192,7 @@ }, { "arguments": { - "values": [ + "data": [ false, false ] @@ -1038,7 +1201,7 @@ }, { "arguments": { - "values": [ + "data": [ true ] }, @@ -1046,7 +1209,7 @@ }, { "arguments": { - "values": [ + "data": [ null ], "ignore_nodata": false @@ -1055,7 +1218,7 @@ }, { "arguments": { - "values": [] + "data": [] }, "returns": null } @@ -1137,7 +1300,7 @@ } ], "returns": { - "description": "A data cube with the newly computed values for the specified. The resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube with the newly computed values. All dimensions stay the same, except for the dimensions specified in corresponding parameters. There are three cases how the data cube changes:\n\n1. 
The source dimension **is** the target dimension:\n * The (number of) dimensions remain unchanged.\n * The source dimension properties name, type and reference system remain unchanged.\n * The dimension labels and the resolution are preserved when the number of pixel values in the source dimension is equal to the number of values computed by the process. The other case is described below.\n2. The source dimension **is not** the target dimension and the latter **exists**:\n * The number of dimensions decreases by one as the source dimension is dropped.\n * The target dimension properties name, type and reference system remain unchanged.\n * The resolution changes, the number of dimension labels is equal to the number of values computed by the process and the dimension labels are incrementing integers starting from zero\n3. The source dimension **is not** the target dimension and the latter **does not exist**:\n * The number of dimensions remain unchanged, but the source dimension is replaced with the target dimension.\n * The target dimension has the specified name and the type other. The reference system is not changed.\n * The resolution changes, the number of dimension labels is equal to the number of values computed by the process and the dimension labels are incrementing integers starting from zero\n\nFor all three cases except for the exception in the first case, the resolution changes, the number of dimension labels is equal to the number of values computed by the process and the dimension labels are incrementing integers starting from zero.", "schema": { "type": "object", "subtype": "raster-cube" @@ -1151,8 +1314,8 @@ }, { "id": "apply_kernel", - "summary": "Apply a kernel to compute pixel-wise values", - "description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of the data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.", + "summary": "Apply a spatial convolution with a kernel", + "description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of the data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.\n\nThe process can't handle non-numerical or infinite numerical values in the data cube. Boolean values are converted to integers (`false` = 0, `true` = 1), but all other non-numerical or infinite values are replaced with zeroes by default (see parameter `replace_invalid`).\n\nFor cases requiring more generic focal operations or non-numerical values, see ``apply_neighborhood()``.", "categories": [ "cubes", "math > image filter" @@ -1168,7 +1331,7 @@ }, { "name": "kernel", - "description": "A two-dimensional weighted kernel. Each dimension of the kernel must have an uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` error.", + "description": "Kernel as a two-dimensional array of weights. The inner level of the nested array aligns with the `x` axis and the outer level aligns with the `y` axis. 
Each level of the kernel must have an uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` error.", "schema": { "description": "A two-dimensional array of numbers.", "type": "array", @@ -1189,10 +1352,39 @@ }, "default": 1, "optional": true + }, + { + "name": "border", + "description": "Determines how the data is extended when the kernel overlaps with the borders. Defaults to fill the border with zeroes.\n\nThe following options are available:\n\n* *numeric value* - fill with a user-defined constant number `n`: `nnnnnn|abcdefgh|nnnnnn` (default, with `n` = 0)\n* `replicate` - repeat the value from the pixel at the border: `aaaaaa|abcdefgh|hhhhhh`\n* `reflect` - mirror/reflect from the border: `fedcba|abcdefgh|hgfedc`\n* `reflect_pixel` - mirror/reflect from the center of the pixel at the border: `gfedcb|abcdefgh|gfedcb`\n* `wrap` - repeat/wrap the image: `cdefgh|abcdefgh|abcdef`", + "schema": [ + { + "type": "string", + "enum": [ + "replicate", + "reflect", + "reflect_pixel", + "wrap" + ] + }, + { + "type": "number" + } + ], + "default": 0, + "optional": true + }, + { + "name": "replace_invalid", + "description": "This parameter specifies the value to replace non-numerical or infinite numerical values with. By default, those values are replaced with zeroes.", + "schema": { + "type": "number" + }, + "default": 0, + "optional": true } ], "returns": { - "description": "A data cube with the newly computed values. The resolution, cardinality and the number of dimensions are the same as for the original data cube.", + "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -1216,6 +1408,225 @@ } ] }, + { + "id": "apply_neighborhood", + "summary": "Apply a process to pixels in a n-dimensional neighbourhood", + "description": "Applies a focal process to a data cube.\n\nA focal process is a process that works on a 'neighbourhood' of pixels. The neighbourhood can extend into multiple dimensions, this extent is specified by the `size` argument. It is not only (part of) the size of the input window, but also the size of the output for a given position of the sliding window. The sliding window moves with multiples of `size`.\n\nAn overlap can be specified so that neighbourhoods can have overlapping boundaries. This allows for continuity of the output. The values included in the data cube as overlap can't be modified by the given `process`. The missing overlap at the borders of the original data cube are made available as no-data (`null`) in the sub data cubes.\n\nThe neighbourhood size should be kept small enough, to avoid running beyond computational resources, but a too small size will result in a larger number of process invocations, which may slow down processing. 
Window sizes for spatial dimensions typically are in the range of 64 to 512 pixels, while overlaps of 8 to 32 pixels are common.\n\nThe process must not add new dimensions, or remove entire dimensions, but the result can have different dimension labels.\n\nFor the special case of 2D convolution, it is recommended to use ``apply_kernel()``.", + "categories": [ + "cubes" + ], + "parameters": [ + { + "name": "data", + "description": "A data cube.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "process", + "description": "Process to be applied on all neighbourhoods.", + "schema": { + "type": "object", + "subtype": "process-graph", + "parameters": [ + { + "name": "data", + "description": "A subset of the data cube as specified in `context` and `overlap`.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "context", + "description": "Additional data passed by the user.", + "schema": { + "description": "Any data type." + }, + "optional": true, + "default": null + } + ] + } + }, + { + "name": "size", + "description": "Neighbourhood sizes along each dimension.\n\nThis object maps dimension names to either a physical measure (e.g. 100 m, 10 days) or pixels (e.g. 32 pixels). For dimensions not specified, the default is to provide all values. Be aware that including all values from overly large dimensions may not be processed at once.", + "schema": { + "type": "array", + "items": { + "type": "object", + "subtype": "chunk-size", + "required": [ + "dimension", + "value" + ], + "properties": { + "dimension": { + "type": "string" + }, + "value": { + "default": null, + "oneOf": [ + { + "type": "null", + "title": "All values" + }, + { + "type": "number", + "minimum": 0, + "description": "See the `unit` parameter for more information." + }, + { + "type": "string", + "subtype": "duration", + "description": "[ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations), e.g. `P1D` for one day.", + "pattern": "^(-?)P(?=\\d|T\\d)(?:(\\d+)Y)?(?:(\\d+)M)?(?:(\\d+)([DW]))?(?:T(?:(\\d+)H)?(?:(\\d+)M)?(?:(\\d+(?:\\.\\d+)?)S)?)?$" + } + ] + }, + "unit": { + "type": "string", + "description": "The unit the values are given in, either meters (`m`) or pixels (`px`). If no unit is given, uses the unit specified for the dimension or otherwise the default unit of the reference system.", + "enum": [ + "px", + "m" + ] + } + } + } + } + }, + { + "name": "overlap", + "description": "Overlap of neighbourhoods along each dimension to avoid border effects.\n\nFor instance a temporal dimension can add 1 month before and after a neighbourhood. In the spatial dimensions, this is often a number of pixels. The overlap specified is added before and after, so an overlap of 8 pixels will add 8 pixels on both sides of the window, so 16 in total.\n\nBe aware that large overlaps increase the need for computational resources and modifying overlapping data in subsequent operations have no effect.", + "optional": true, + "schema": { + "type": "array", + "items": { + "type": "object", + "subtype": "chunk-size", + "required": [ + "dimension", + "value" + ], + "properties": { + "dimension": { + "type": "string" + }, + "value": { + "default": null, + "oneOf": [ + { + "type": "null", + "title": "No values" + }, + { + "type": "number", + "minimum": 0, + "description": "See the `unit` parameter for more information." + }, + { + "type": "string", + "subtype": "duration", + "description": "[ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations), e.g. 
`P1D` for one day.", + "pattern": "^(-?)P(?=\\d|T\\d)(?:(\\d+)Y)?(?:(\\d+)M)?(?:(\\d+)([DW]))?(?:T(?:(\\d+)H)?(?:(\\d+)M)?(?:(\\d+(?:\\.\\d+)?)S)?)?$" + } + ] + }, + "unit": { + "type": "string", + "description": "The unit the values are given in, either meters (`m`) or pixels (`px`). If no unit is given, uses the unit specified for the dimension or otherwise the default unit of the reference system.", + "enum": [ + "px", + "m" + ] + } + } + } + } + }, + { + "name": "context", + "description": "Additional data to be passed to the process.", + "schema": { + "description": "Any data type." + }, + "optional": true, + "default": null + } + ], + "returns": { + "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + "examples": [ + { + "arguments": { + "data": { + "from_parameter": "data" + }, + "process": { + "process_graph": { + "udf": { + "process_id": "run_udf", + "arguments": { + "data": { + "from_parameter": "data" + }, + "udf": "ml.py", + "runtime": "Python" + }, + "result": true + } + } + }, + "size": [ + { + "dimension": "x", + "value": 128, + "unit": "px" + }, + { + "dimension": "y", + "value": 128, + "unit": "px" + }, + { + "dimension": "t", + "value": "P5D" + } + ], + "overlap": [ + { + "dimension": "x", + "value": 16, + "unit": "px" + }, + { + "dimension": "y", + "value": 16, + "unit": "px" + }, + { + "dimension": "t", + "value": "P3D" + } + ] + } + } + ], + "exceptions": { + "DimensionNotAvailable": { + "message": "A dimension with the specified name does not exist." + } + } + }, { "id": "apply", "summary": "Apply a process to each pixel", @@ -1269,7 +1680,7 @@ } ], "returns": { - "description": "A data cube with the newly computed values. The resolution, cardinality and the number of dimensions are the same as for the original data cube.", + "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -1310,26 +1721,6 @@ "x": 1 }, "returns": 0 - }, - { - "process_graph": { - "cos1": { - "process_id": "cos", - "arguments": { - "x": 0.5 - } - }, - "arccos1": { - "process_id": "arccos", - "arguments": { - "x": { - "from_node": "cos1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -1374,26 +1765,6 @@ "x": 1 }, "returns": 0 - }, - { - "process_graph": { - "cosh1": { - "process_id": "cosh", - "arguments": { - "x": 0.5 - } - }, - "arccosh1": { - "process_id": "arcosh", - "arguments": { - "x": { - "from_node": "cosh1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -1438,26 +1809,6 @@ "x": 0 }, "returns": 0 - }, - { - "process_graph": { - "sin1": { - "process_id": "sin", - "arguments": { - "x": 0.5 - } - }, - "arcsin1": { - "process_id": "arcsin", - "arguments": { - "x": { - "from_node": "sin1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -1502,26 +1853,6 @@ "x": 0 }, "returns": 0 - }, - { - "process_graph": { - "tan1": { - "process_id": "tan", - "arguments": { - "x": 0.5 - } - }, - "arctan1": { - "process_id": "arctan", - "arguments": { - "x": { - "from_node": "tan1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -1680,7 +2011,21 @@ "description": "Any data type is allowed." 
} } - } + }, + "links": [ + { + "rel": "example", + "type": "application/json", + "href": "https://processes.openeo.org/1.0.0/examples/array_find_nodata.json", + "title": "Find no-data values in arrays" + }, + { + "rel": "example", + "type": "application/json", + "href": "https://processes.openeo.org/1.0.0/examples/array_contains_nodata.json", + "title": "Check for no-data values in arrays" + } + ] }, { "id": "array_contains", @@ -1688,7 +2033,8 @@ "description": "Checks whether the array specified for `data` contains the value specified in `value`. Returns `true` if there's a match, otherwise `false`.\n\n**Remarks:**\n\n* To get the index or the label of the value found, use ``array_find()``.\n* All definitions for the process ``eq()`` regarding the comparison of values apply here as well. A `null` return value from ``eq()`` is handled exactly as `false` (no match).\n* Data types MUST be checked strictly, for example a string with the content *1* is not equal to the number *1*.\n* An integer *1* is equal to a floating point number *1.0* as `integer` is a sub-type of `number`. Still, this process may return unexpectedly `false` when comparing floating point numbers due to floating point inaccuracy in machine-based computation.\n* Temporal strings are treated as normal strings and MUST NOT be interpreted.\n* If the specified value is an array, object or null, the process always returns `false`. See the examples for one to check for `null` values.", "categories": [ "arrays", - "comparison" + "comparison", + "reducer" ], "parameters": [ { @@ -1716,41 +2062,6 @@ } }, "examples": [ - { - "title": "Check for `null` values", - "description": "Check whether the array contains a `null` value.", - "process_graph": { - "apply": { - "process_id": "array_apply", - "arguments": { - "data": { - "from_parameter": "data" - }, - "process": { - "process-graph": { - "is_null": { - "process_id": "is_nodata", - "arguments": { - "from_parameter": "x" - }, - "result": true - } - } - } - } - }, - "find": { - "process_id": "array_contains", - "arguments": { - "data": { - "from_node": "apply" - }, - "value": true - }, - "result": true - } - } - }, { "arguments": { "data": [ @@ -1847,6 +2158,14 @@ "returns": false } ], + "links": [ + { + "rel": "example", + "type": "application/json", + "href": "https://processes.openeo.org/1.0.0/examples/array_contains_nodata.json", + "title": "Check for no-data values in arrays" + } + ], "process_graph": { "find": { "process_id": "array_find", @@ -1883,7 +2202,8 @@ "summary": "Get an element from an array", "description": "Returns the element with the specified index or label from the array.\n\nEither the parameter `index` or `label` must be specified, otherwise the `ArrayElementParameterMissing` exception is thrown. If both parameters are set the `ArrayElementParameterConflict` exception is thrown.", "categories": [ - "arrays" + "arrays", + "reducer" ], "parameters": [ { @@ -2073,7 +2393,8 @@ "summary": "Get the index for a value in an array", "description": "Checks whether the array specified for `data` contains the value specified in `value` and returns the zero-based index for the first match. If there's no match, `null` is returned.\n\n**Remarks:**\n\n* To get a boolean value returned use ``array_contains()``.\n* All definitions for the process ``eq()`` regarding the comparison of values apply here as well. 
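The `process_graph` above defines ``array_contains()`` in terms of ``array_find()``: a match exists exactly when the returned index is not `null`. A rough JavaScript rendering of that relationship, using the strict comparison rules from the descriptions (sketch only, not the library's evaluator):

```js
// array_find: zero-based index of the first strict match, else null.
// Arrays, objects and null as the search value never match.
function arrayFind(data, value) {
  if (value === null || typeof value === 'object') return null;
  const index = data.findIndex(element => element === value);
  return index === -1 ? null : index;
}

// array_contains expressed via array_find, as in the process graph above.
const arrayContains = (data, value) => arrayFind(data, value) !== null;

arrayContains([1, 2, 3], "2"); // false: a string never equals a number
arrayContains([1, 2, 3], 2);   // true
```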
A `null` return value from ``eq()`` is handled exactly as `false` (no match).\n* Data types MUST be checked strictly, for example a string with the content *1* is not equal to the number *1*.\n* An integer *1* is equal to a floating point number *1.0* as `integer` is a sub-type of `number`. Still, this process may return unexpectedly `false` when comparing floating point numbers due to floating point inaccuracy in machine-based computation.\n* Temporal strings are treated as normal strings and MUST NOT be interpreted.\n* If the specified value is an array, object or null, the process always returns `null`. See the examples for one to find `null` values.", "categories": [ - "arrays" + "arrays", + "reducer" ], "parameters": [ { @@ -2107,41 +2428,6 @@ ] }, "examples": [ - { - "title": "Find `null` values", - "description": "Get the index of the first `null` value in an array.", - "process_graph": { - "apply": { - "process_id": "array_apply", - "arguments": { - "data": { - "from_parameter": "data" - }, - "process": { - "process-graph": { - "is_null": { - "process_id": "is_nodata", - "arguments": { - "from_parameter": "x" - }, - "result": true - } - } - } - } - }, - "find": { - "process_id": "array_find", - "arguments": { - "data": { - "from_node": "apply" - }, - "value": true - }, - "result": true - } - } - }, { "arguments": { "data": [ @@ -2238,6 +2524,14 @@ }, "returns": null } + ], + "links": [ + { + "rel": "example", + "type": "application/json", + "href": "https://processes.openeo.org/1.0.0/examples/array_find_nodata.json", + "title": "Find no-data values in arrays" + } ] }, { @@ -2308,26 +2602,6 @@ "x": 0 }, "returns": 0 - }, - { - "process_graph": { - "sinh1": { - "process_id": "sinh", - "arguments": { - "x": 0.5 - } - }, - "arcsinh1": { - "process_id": "arsinh", - "arguments": { - "x": { - "from_node": "sinh1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -2372,26 +2646,6 @@ "x": 0 }, "returns": 0 - }, - { - "process_graph": { - "cosh1": { - "process_id": "tanh", - "arguments": { - "x": 0.5 - } - }, - "arctanh1": { - "process_id": "artanh", - "arguments": { - "x": { - "from_node": "tanh1" - } - }, - "result": true - } - }, - "returns": 0.5 } ], "links": [ @@ -2693,6 +2947,74 @@ } ] }, + { + "id": "climatological_normal", + "summary": "Computes climatology normals", + "description": "Climatological normal period is a usually 30 year average of a weather variable. Climatological normals are used as an average or baseline to evaluate climate events and provide context for yearly, monthly, daily or seasonal variability. The default climatology period is from 1981 until 2010 (both inclusive).", + "categories": [ + "climatology" + ], + "parameters": [ + { + "name": "data", + "description": "A data cube with exactly one temporal dimension. The data cube must span at least the temporal interval specified in the parameter `climatology-period`.\n\nSeasonal periods may span two consecutive years, e.g. temporal winter that includes months December, January and February. If the required months before the actual climate period are available, the season is taken into account. If not available, the first season is not taken into account and the seasonal mean is based on one year less than the other seasonal normals. The incomplete season at the end of the last year is never taken into account.", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + { + "name": "period", + "description": "The time intervals to aggregate the average value for. 
The following pre-defined frequencies are supported:\n\n* `day`: Day of the year\n* `month`: Month of the year\n* `climatology-period`: The period specified in the `climatology-period`.\n* `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November).\n* `tropical-season`: Six month periods of the tropical seasons (November - April, May - October).", + "schema": { + "type": "string", + "enum": [ + "day", + "month", + "season", + "tropical-season", + "climatology-period" + ] + } + }, + { + "name": "climatology_period", + "description": "The climatology period as closed temporal interval. The first element of the array is the first year to be fully included in the temporal interval. The second element is the last year to be fully included in the temporal interval. The default period is from 1981 until 2010 (both inclusive).", + "schema": { + "type": "array", + "subtype": "temporal-interval", + "minItems": 2, + "maxItems": 2, + "items": { + "type": "string", + "subtype": "year", + "minLength": 4, + "maxLength": 4, + "pattern": "^\\d{4}$" + } + }, + "default": [ + "1981", + "2010" + ], + "optional": true + } + ], + "returns": { + "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal dimension. The temporal dimension has the following dimension labels:\n\n* `day`: `001` - `365`\n* `month`: `01` - `12`\n* `climatology-period`: `climatology-period`\n* `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November)\n* `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October)", + "schema": { + "type": "object", + "subtype": "raster-cube" + } + }, + "links": [ + { + "rel": "about", + "href": "https://en.wikipedia.org/wiki/Climatological_normal", + "title": "Background information on climatology normal by Wikipedia" + } + ] + }, { "id": "clip", "summary": "Clip a value between a minimum and a maximum", @@ -2780,8 +3102,7 @@ { "from_parameter": "x" } - ], - "ignore_nodata": false + ] } }, "max": { @@ -2794,13 +3115,35 @@ { "from_node": "min" } - ], - "ignore_nodata": false + ] }, "result": true } } }, + { + "id": "constant", + "summary": "Define a constant value", + "description": "Defines a constant value that can be reused in multiple places of a process.", + "categories": [ + "math > constants" + ], + "parameters": [ + { + "name": "x", + "description": "The value of the constant.", + "schema": { + "description": "Any data type." + } + } + ], + "returns": { + "description": "The value of the constant.", + "schema": { + "description": "Any data type." + } + } + }, { "id": "cos", "summary": "Cosine", @@ -3041,7 +3384,7 @@ ], "parameters": [], "returns": { - "description": "An empty raster data cube.", + "description": "An empty raster data cube with zero dimensions.", "schema": { "type": "object", "subtype": "raster-cube" @@ -3657,7 +4000,7 @@ } ], "returns": { - "description": "The data cube with one dimension less.", + "description": "A data cube without the specified dimension. 
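The ``clip()`` process graph above chains ``min()`` and ``max()``; in plain JavaScript the same bound reads as follows (sketch, with the no-data pass-through mentioned in the process description):

```js
// clip(x) = max(min(x, maxBound), minBound); null is passed through.
const clip = (x, min, max) =>
  x === null ? null : Math.max(min, Math.min(max, x));

clip(-5, -1, 1);  // -1
clip(0.5, -1, 1); //  0.5
```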
The number of dimensions decreases by one, but the dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -3687,27 +4030,6 @@ "type": "number" } }, - "examples": [ - { - "title": "Test for ln(e) = 1", - "process_graph": { - "e1": { - "process_id": "e", - "arguments": {} - }, - "ln1": { - "process_id": "ln", - "arguments": { - "data": { - "from_node": "e1" - } - }, - "result": true - } - }, - "returns": 1 - } - ], "links": [ { "rel": "about", @@ -4079,6 +4401,7 @@ "summary": "Filter the bands by name", "description": "Filters the bands in the data cube so that bands that don't match any of the criteria are dropped from the data cube. The data cube is expected to have only one dimension of type `bands`. Fails with a `DimensionMissing` error if no such dimension exists.\n\nThe following criteria can be used to select bands:\n\n* `bands`: band name or common band name (e.g. `B01`, `B8A`, `red` or `nir`)\n* `wavelengths`: ranges of wavelengths in micrometres (μm) (e.g. 0.5 - 0.6)\n\nAll these information are exposed in the band metadata of the collection. To keep algorithms interoperable it is recommended to prefer the common bands names or the wavelengths over collection and/or back-end specific band names.\n\nIf multiple criteria are specified, any of them must match and not all of them, i.e. they are combined with an OR-operation. If no criteria is specified, the `BandFilterParameterMissing` exception must be thrown.\n\n**Important:** The order of the specified array defines the order of the bands in the data cube, which can be important for subsequent processes. If multiple bands are matched by a single criterion (e.g. a range of wavelengths), they stay in the original order.", "categories": [ + "cubes", "filter" ], "parameters": [ @@ -4134,7 +4457,7 @@ } ], "returns": { - "description": "A data cube limited to a subset of its original bands. Therefore, the cardinality is potentially lower, but the resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube limited to a subset of its original bands. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of type `bands` has less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "raster-cube" @@ -4161,6 +4484,7 @@ "summary": "Spatial filter using a bounding box", "description": "Limits the data cube to the specified bounding box.\n\nThe filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC).", "categories": [ + "cubes", "filter" ], "parameters": [ @@ -4226,7 +4550,7 @@ "subtype": "epsg-code", "minimum": 1000, "examples": [ - 7099 + 3857 ] }, { @@ -4248,7 +4572,7 @@ } ], "returns": { - "description": "A data cube restricted to the bounding box. Therefore, the cardinality is potentially lower, but the resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube restricted to the bounding box. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "raster-cube" @@ -4327,7 +4651,7 @@ }, { "name": "dimension", - "description": "The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Remarks:**\n\n* The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.\n* There could be multiple spatial dimensions such as `x`, `y` or `z`.\n* For multi-spectral imagery there is usually a separate dimension of type `bands` for the bands.", + "description": "The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": "string" } @@ -4343,7 +4667,7 @@ } ], "returns": { - "description": "The filtered data cube. Therefore, the cardinality of the dimension is potentially lower, but the resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "raster-cube" @@ -4357,32 +4681,26 @@ "examples": [ { "description": "Filters the data cube to only contain data from platform Sentinel-2A. This example assumes that the data cube has a dimension `platform` so that computations can distinguish between Sentinel-2A and Sentinel-2B data.", - "process_graph": { - "filter": { - "process_id": "filter_labels", - "arguments": { - "data": { - "from_parameter": "sentinel2_data" - }, - "condition": { - "process_graph": { - "eq": { - "process_id": "eq", - "arguments": { - "x": { - "from_parameter": "value" - }, - "y": "Sentinel-2A", - "case_sensitive": false - }, - "result": true - } - } - }, - "dimension": "platform" - }, - "result": true - } + "arguments": { + "data": { + "from_parameter": "sentinel2_data" + }, + "condition": { + "process_graph": { + "eq": { + "process_id": "eq", + "arguments": { + "x": { + "from_parameter": "value" + }, + "y": "Sentinel-2A", + "case_sensitive": false + }, + "result": true + } + } + }, + "dimension": "platform" } } ] @@ -4392,6 +4710,7 @@ "summary": "Spatial filter using geometries", "description": "Limits the data cube over the spatial dimensions to the specified geometries.\n\n- For **polygons**, the filter retains a pixel in the data cube if the point at the pixel center intersects with at least one of the polygons (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nMore specifically, pixels outside of the bounding box of the given geometry will not be available after filtering. All pixels inside the bounding box that are not retained will be set to `null` (no data).", "categories": [ + "cubes", "filter" ], "parameters": [ @@ -4413,7 +4732,7 @@ } ], "returns": { - "description": "A data cube restricted to the specified geometries. 
Therefore, the cardinality is potentially lower, but the resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "raster-cube" @@ -4432,6 +4751,7 @@ "summary": "Temporal filter for a temporal intervals", "description": "Limits the data cube to the specified interval of dates and/or times.\n\nMore precisely, the filter checks whether the temporal dimension label is greater than or equal to the lower boundary (start date/time) and the temporal dimension label is less than the value of the upper boundary (end date/time). This corresponds to a left-closed interval, which contains the lower boundary but not the upper boundary.\n\nIf the dimension is set to `null` (it's the default value), the data cube is expected to only have one temporal dimension.", "categories": [ + "cubes", "filter" ], "parameters": [ @@ -4463,6 +4783,13 @@ "format": "date", "subtype": "date" }, + { + "type": "string", + "subtype": "year", + "minLength": 4, + "maxLength": 4, + "pattern": "^\\d{4}$" + }, { "type": "null" } @@ -4482,7 +4809,7 @@ }, { "name": "dimension", - "description": "The name of the temporal dimension to filter on. If the dimension is not set or is set to `null`, the filter applies to all temporal dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Note:** The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.", + "description": "The name of the temporal dimension to filter on. If the dimension is not set or is set to `null`, the filter applies to all temporal dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": [ "string", @@ -4494,7 +4821,7 @@ } ], "returns": { - "description": "A data cube restricted to the specified temporal extent. Therefore, the cardinality is potentially lower, but the resolution and the number of dimensions are the same as for the original data cube.", + "description": "A data cube restricted to the specified temporal extent. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given temporal dimension(s) have less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "raster-cube" @@ -4509,7 +4836,7 @@ { "id": "first", "summary": "First element", - "description": "Gives the first element of an array. For an empty array `null` is returned.", + "description": "Gives the first element of an array.\n\nAn array without non-`null` elements resolves always with `null`.", "categories": [ "arrays", "reducer" @@ -4517,7 +4844,7 @@ "parameters": [ { "name": "data", - "description": "An array with elements of any data type. An empty array resolves always with `null`.", + "description": "An array with elements of any data type.", "schema": { "type": "array", "items": { @@ -5184,7 +5511,7 @@ { "id": "last", "summary": "Last element", - "description": "Gives the last element of an array. 
For an empty array `null` is returned.", + "description": "Gives the last element of an array.\n\nAn array without non-`null` elements resolves always with `null`.", "categories": [ "arrays", "reducer" @@ -5192,7 +5519,7 @@ "parameters": [ { "name": "data", - "description": "An array with elements of any data type. An empty array resolves always with `null`.", + "description": "An array with elements of any data type.", "schema": { "type": "array", "items": { @@ -5261,14 +5588,14 @@ { "id": "linear_scale_range", "summary": "Linear transformation between two ranges", - "description": "Performs a linear transformation between the input and output range.\n\nThe underlying formula is: *((x - inputMin) / (inputMax - inputMin)) * (outputMax - outputMin) + outputMin*.\n\nPotential use case include\n\n* scaling values to the 8-bit range (0 - 255) often used for numeric representation of values in one of the channels of the [RGB colour model](https://en.wikipedia.org/wiki/RGB_color_model#Numeric_representations) or\n* calculating percentages (0 - 100).\n\nThe no-data value `null` is passed through and therefore gets propagated.", + "description": "Performs a linear transformation between the input and output range.\n\nThe given number in `x` is clipped to the bounds specified in `inputMin` and `inputMax` so that the underlying formula *((x - inputMin) / (inputMax - inputMin)) * (outputMax - outputMin) + outputMin* never returns any value lower than `outputMin` or greater than `outputMax`.\n\nPotential use case include\n\n* scaling values to the 8-bit range (0 - 255) often used for numeric representation of values in one of the channels of the [RGB colour model](https://en.wikipedia.org/wiki/RGB_color_model#Numeric_representations) or\n* calculating percentages (0 - 100).\n\nThe no-data value `null` is passed through and therefore gets propagated.", "categories": [ "math" ], "parameters": [ { "name": "x", - "description": "A number to transform.", + "description": "A number to transform. The number gets clipped to the bounds specified in `inputMin` and `inputMax`.", "schema": { "type": [ "number", @@ -5344,6 +5671,17 @@ "inputMax": 100 }, "returns": null + }, + { + "description": "Shows that the input data is clipped.", + "arguments": { + "x": 1.12, + "inputMin": 0, + "inputMax": 1, + "outputMin": 0, + "outputMax": 255 + }, + "returns": 255 } ], "process_graph": { @@ -5445,25 +5783,6 @@ } }, "examples": [ - { - "title": "Example for ln(e) = 1", - "process_graph": { - "e1": { - "process_id": "e", - "arguments": {} - }, - "ln1": { - "process_id": "ln", - "arguments": { - "data": { - "from_node": "e1" - } - }, - "result": true - } - }, - "returns": 1 - }, { "arguments": { "x": 1 @@ -5517,7 +5836,7 @@ "schema": { "type": "string", "subtype": "collection-id", - "pattern": "^[A-Za-z0-9_\\-\\.~/]+$" + "pattern": "^[\\w\\-\\.~/]+$" } }, { @@ -5576,7 +5895,7 @@ "subtype": "epsg-code", "minimum": 1000, "examples": [ - 7099 + 3857 ] }, { @@ -5628,6 +5947,13 @@ "format": "date", "subtype": "date" }, + { + "type": "string", + "subtype": "year", + "minLength": 4, + "maxLength": 4, + "pattern": "^\\d{4}$" + }, { "type": "null" } @@ -5663,7 +5989,7 @@ } }, { - "title": "No Filter", + "title": "No filter", "description": "Don't filter bands. 
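Putting the formula and the new clipping behaviour of ``linear_scale_range()`` (described above) together, a direct JavaScript transcription looks like this (illustrative sketch):

```js
// ((x - inputMin) / (inputMax - inputMin)) * (outputMax - outputMin) + outputMin,
// with x clipped to [inputMin, inputMax] first and null passed through.
function linearScaleRange(x, inputMin, inputMax, outputMin = 0, outputMax = 1) {
  if (x === null) return null;
  const clipped = Math.min(Math.max(x, inputMin), inputMax);
  return ((clipped - inputMin) / (inputMax - inputMin)) * (outputMax - outputMin) + outputMin;
}

linearScaleRange(1.12, 0, 1, 0, 255); // 255, the input is clipped first
```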
All bands are included in the data cube.", "type": "null" } @@ -5673,10 +5999,13 @@ }, { "name": "properties", - "description": "Limits the data by metadata properties to include only data in the data cube which all given conditions return `true` for (AND operation).\n\nSpecify key-value-pairs with the keys being the name of the metadata property, which can be retrieved with the openEO Data Discovery for Collections. The values must be conditions to be evaluated against the collection metadata, see the example.", + "description": "Limits the data by metadata properties to include only data in the data cube which all given conditions return `true` for (AND operation).\n\nSpecify key-value-pairs with the key being the name of the metadata property, which can be retrieved with the openEO Data Discovery for Collections. The value must a condition (user-defined process) to be evaluated against the collection metadata, see the example.", "schema": [ { "type": "object", + "subtype": "metadata-filter", + "title": "Filters", + "description": "A list of filters to check against. Specify key-value-pairs with the key being the name of the metadata property name and the value being a process evaluated against the metadata values.", "additionalProperties": { "type": "object", "subtype": "process-graph", @@ -5684,7 +6013,6 @@ { "name": "value", "description": "The property value to be checked against.", - "required": true, "schema": { "description": "Any data type." } @@ -5703,7 +6031,7 @@ } ], "returns": { - "description": "A data cube for further processing.", + "description": "A data cube for further processing. The dimensions and dimension properties (name, type, labels, reference system and resolution) correspond to the collection's metadata, but the dimension labels are restricted as specified in the parameters.", "schema": { "type": "object", "subtype": "raster-cube" @@ -5712,55 +6040,49 @@ "examples": [ { "description": "Loading `Sentinel-2B` data from a `Sentinel-2` collection for 2018, but only with cloud cover between 0 and 50%.", - "process_graph": { - "loadco1": { - "process_id": "load_collection", - "arguments": { - "id": "Sentinel-2", - "spatial_extent": { - "west": 16.1, - "east": 16.6, - "north": 48.6, - "south": 47.2 - }, - "temporal_extent": [ - "2018-01-01", - "2019-01-01" - ], - "properties": { - "eo:cloud_cover": { - "process_graph": { - "cc": { - "process_id": "between", - "arguments": { - "x": { - "from_parameter": "value" - }, - "min": 0, - "max": 50 - }, - "result": true - } - } - }, - "platform": { - "process_graph": { - "pf": { - "process_id": "eq", - "arguments": { - "x": { - "from_parameter": "value" - }, - "y": "Sentinel-2B", - "case_sensitive": false - }, - "result": true - } - } + "arguments": { + "id": "Sentinel-2", + "spatial_extent": { + "west": 16.1, + "east": 16.6, + "north": 48.6, + "south": 47.2 + }, + "temporal_extent": [ + "2018-01-01", + "2019-01-01" + ], + "properties": { + "eo:cloud_cover": { + "process_graph": { + "cc": { + "process_id": "between", + "arguments": { + "x": { + "from_parameter": "value" + }, + "min": 0, + "max": 50 + }, + "result": true } } }, - "result": true + "platform": { + "process_graph": { + "pf": { + "process_id": "eq", + "arguments": { + "x": { + "from_parameter": "value" + }, + "y": "Sentinel-2B", + "case_sensitive": false + }, + "result": true + } + } + } } } } @@ -5808,7 +6130,7 @@ "schema": { "type": "string", "subtype": "job-id", - "pattern": "^[A-Za-z0-9_\\-\\.~]+$" + "pattern": "^[\\w\\-\\.~]+$" } } ], @@ -5843,7 +6165,7 @@ }, { 
"name": "format", - "description": "The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. This parameter is *case insensitive*.", + "description": "The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*.", "schema": { "type": "string", "subtype": "input-format" @@ -5851,7 +6173,7 @@ }, { "name": "options", - "description": "The file format options to be used to read the files. Must correspond to the options that the server reports as supported options for the chosen `format`. The option names and valid values usually correspond to the GDAL/OGR format options.", + "description": "The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options.", "schema": { "type": "object", "subtype": "input-format-options" @@ -5866,6 +6188,11 @@ "type": "object", "subtype": "raster-cube" } + }, + "exceptions": { + "FormatUnsuitable": { + "message": "Data can't be loaded with the requested input format." + } } }, { @@ -6194,6 +6521,7 @@ "summary": "Apply a polygon mask", "description": "Applies a polygon mask to a raster data cube. To apply a raster mask use ``mask()``.\n\nAll pixels for which the point at the pixel center **does not** intersect with any polygon (as defined in the Simple Features standard by the OGC) are replaced. This behaviour can be inverted by setting the parameter `inside` to `true`.\n\nThe pixel values are replaced with the value specified for `replacement`, which defaults to `null` (no data). No data values in `data` will be left untouched by the masking operation.", "categories": [ + "cubes", "masks" ], "parameters": [ @@ -6238,7 +6566,7 @@ } ], "returns": { - "description": "The masked raster data cube.", + "description": "A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -6292,7 +6620,7 @@ } ], "returns": { - "description": "The masked raster data cube.", + "description": "A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -6302,7 +6630,7 @@ { "id": "max", "summary": "Maximum value", - "description": "Computes the largest value of an array of numbers, which is is equal to the first element of a sorted (i.e., ordered) version the array.", + "description": "Computes the largest value of an array of numbers, which is is equal to the first element of a sorted (i.e., ordered) version the array.\n\nAn array without non-`null` elements resolves always with `null`.", "categories": [ "math", "reducer" @@ -6310,7 +6638,7 @@ "parameters": [ { "name": "data", - "description": "An array of numbers. 
An empty array resolves always with `null`.", + "description": "An array of numbers.", "schema": { "type": "array", "items": { @@ -6395,7 +6723,7 @@ { "id": "mean", "summary": "Arithmetic mean (average)", - "description": "The arithmetic mean of an array of numbers is the quantity commonly called the average. It is defined as the sum of all elements divided by the number of elements.", + "description": "The arithmetic mean of an array of numbers is the quantity commonly called the average. It is defined as the sum of all elements divided by the number of elements.\n\nAn array without non-`null` elements resolves always with `null`.", "categories": [ "math", "reducer" @@ -6403,7 +6731,7 @@ "parameters": [ { "name": "data", - "description": "An array of numbers. An empty array resolves always with `null`.", + "description": "An array of numbers.", "schema": { "type": "array", "items": { @@ -6472,6 +6800,16 @@ "data": [] }, "returns": null + }, + { + "description": "The input array has only `null` elements: return `null`.", + "arguments": { + "data": [ + null, + null + ] + }, + "returns": null } ], "links": [ @@ -6480,12 +6818,78 @@ "href": "http://mathworld.wolfram.com/ArithmeticMean.html", "title": "Arithmetic mean explained by Wolfram MathWorld" } - ] + ], + "process_graph": { + "count_condition": { + "process_id": "if", + "arguments": { + "value": { + "from_parameter": "ignore_nodata" + }, + "accept": null, + "reject": true + } + }, + "count": { + "process_id": "count", + "arguments": { + "data": { + "from_parameter": "data" + }, + "condition": { + "from_node": "count_condition" + } + } + }, + "sum": { + "process_id": "sum", + "arguments": { + "data": { + "from_parameter": "data" + }, + "ignore_nodata": { + "from_parameter": "ignore_nodata" + } + } + }, + "divide": { + "process_id": "divide", + "arguments": { + "x": { + "from_node": "sum" + }, + "y": { + "from_node": "count" + } + } + }, + "neq": { + "process_id": "neq", + "arguments": { + "x": { + "from_node": "count" + }, + "y": 0 + } + }, + "if": { + "process_id": "if", + "arguments": { + "value": { + "from_node": "neq" + }, + "accept": { + "from_node": "divide" + } + }, + "result": true + } + } }, { "id": "median", "summary": "Statistical median", - "description": "The statistical median of an array of numbers is the value separating the higher half from the lower half of the data.\n\n**Remarks:**\n\n* For a symmetric arrays, the result is equal to the ``mean()``.\n* The median can also be calculated by computing the quantile (see process ``quantiles()``) with the probability of *0.5*: `quantiles(data, [0.5])`.\n* An empty input array returns `null`.", + "description": "The statistical median of an array of numbers is the value separating the higher half from the lower half of the data.\n\nAn array without non-`null` elements resolves always with `null`.\n\n**Remarks:**\n\n* For a symmetric arrays, the result is equal to the ``mean()``.\n* The median can also be calculated by computing the ``quantiles()`` with a probability of *0.5*.", "categories": [ "math", "reducer" @@ -6493,7 +6897,7 @@ "parameters": [ { "name": "data", - "description": "An array of numbers. 
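The ``mean()`` process graph added above (a `count` whose condition is derived via `if`, a `sum`, a `divide`, and a `neq` guard against a zero count) boils down to the following JavaScript (sketch, not the library's evaluator):

```js
// mean = sum / count; nulls are skipped when ignore_nodata is true,
// otherwise they propagate. A zero count yields null, not an error.
function mean(data, ignoreNodata = true) {
  if (!ignoreNodata && data.includes(null)) return null; // sum propagates null
  const values = data.filter(v => v !== null);
  if (values.length === 0) return null; // guard: count is 0
  return values.reduce((sum, v) => sum + v, 0) / values.length;
}

mean([1, 0, 3, 2]);         // 1.5
mean([9, 2.5, null, -2.5]); // 3
mean([null, null]);         // null
```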
An empty array resolves always with `null`.", + "description": "An array of numbers.", "schema": { "type": "array", "items": { @@ -6582,6 +6986,16 @@ "data": [] }, "returns": null + }, + { + "description": "The input array has only `null` elements: return `null`.", + "arguments": { + "data": [ + null, + null + ] + }, + "returns": null } ], "links": [ @@ -6590,7 +7004,34 @@ "href": "http://mathworld.wolfram.com/StatisticalMedian.html", "title": "Statistical Median explained by Wolfram MathWorld" } - ] + ], + "process_graph": { + "quantiles": { + "process_id": "quantiles", + "arguments": { + "data": { + "from_parameter": "data" + }, + "probabilities": [ + 0.5 + ], + "ignore_nodata": { + "from_parameter": "ignore_nodata" + } + } + }, + "array_element": { + "process_id": "array_element", + "arguments": { + "data": { + "from_node": "quantiles" + }, + "return_nodata": true, + "index": 0 + }, + "result": true + } + } }, { "id": "merge_cubes", @@ -6662,7 +7103,7 @@ } ], "returns": { - "description": "The merged data cube.", + "description": "The merged data cube. See the process description for details regarding the dimensions and dimension properties (name, type, labels, reference system and resolution).", "schema": { "type": "object", "subtype": "raster-cube" @@ -6684,7 +7125,7 @@ { "id": "min", "summary": "Minimum value", - "description": "Computes the smallest value of an array of numbers, which is is equal to the last element of a sorted (i.e., ordered) version the array.", + "description": "Computes the smallest value of an array of numbers, which is is equal to the last element of a sorted (i.e., ordered) version the array.\n\nAn array without non-`null` elements resolves always with `null`.", "categories": [ "math", "reducer" @@ -6692,7 +7133,7 @@ "parameters": [ { "name": "data", - "description": "An array of numbers. An empty array resolves always with `null`.", + "description": "An array of numbers.", "schema": { "type": "array", "items": { @@ -6776,7 +7217,7 @@ { "id": "mod", "summary": "Modulo", - "description": "Remainder after division of `x` by `y`.\n\nThe result of a modulo operation has the sign of the divisor. The handling regarding the sign of the result [differs between programming languages](https://en.wikipedia.org/wiki/Modulo_operation) and needs careful consideration while implementing this process.\n\nThe no-data value `null` is passed through and therefore gets propagated if any of the arguments is `null`.", + "description": "Remainder after division of `x` by `y` for both integers and floating-point numbers.\n\nThe result of a modulo operation has the sign of the divisor. The handling regarding the sign of the result [differs between programming languages](https://en.wikipedia.org/wiki/Modulo_operation#In_programming_languages) and needs careful consideration to avoid unexpected results.\n\nThe no-data value `null` is passed through and therefore gets propagated if any of the arguments is `null`. A modulo by zero results in ±infinity if the processing environment supports it. Otherwise a `DivisionByZero` error must the thrown.", "categories": [ "math" ], @@ -6811,6 +7252,11 @@ ] } }, + "exceptions": { + "DivisionByZero": { + "message": "Division by zero is not supported." 
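For the sign-of-divisor semantics and the `DivisionByZero` exception described above, JavaScript's native `%` (which takes the sign of the dividend) needs a small wrapper. A sketch under those rules:

```js
// Modulo with the sign of the divisor, as specified for mod() above.
function mod(x, y) {
  if (x === null || y === null) return null;          // no-data propagates
  if (y === 0) return x >= 0 ? Infinity : -Infinity;  // or throw DivisionByZero
  return ((x % y) + y) % y;
}

mod(27, 5);    //  2
mod(-27, 5);   //  3
mod(3.14, -2); // -0.86 (up to floating-point precision)
```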
+ } + }, "examples": [ { "arguments": { @@ -6828,10 +7274,10 @@ }, { "arguments": { - "x": 27, - "y": -5 + "x": 3.14, + "y": -2 }, - "returns": -3 + "returns": -0.86 }, { "arguments": { @@ -7002,7 +7448,7 @@ "schema": [ { "type": "string", - "pattern": "^[A-Za-z0-9_]+$" + "pattern": "^\\w+$" }, { "type": "null" @@ -7013,7 +7459,7 @@ } ], "returns": { - "description": "A raster data cube containing the computed NDVI values.", + "description": "A raster data cube containing the computed NDVI values. The structure of the data cube differs depending on the value passed to `target_band`:\n\n* `target_band` is `null`: The data cube does not contain the dimension of type `bands` any more, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.\n* `target_band` is a string: The data cube keeps the same dimensions. The dimension properties remain unchanged, but the number of dimension labels for the dimension of type `bands` increases by one. The additional label is named as specified in `target_band`.", "schema": { "type": "object", "subtype": "raster-cube" @@ -7698,27 +8144,6 @@ "type": "number" } }, - "examples": [ - { - "title": "Test for sin(π) = 0", - "process_graph": { - "pi1": { - "process_id": "pi", - "arguments": {} - }, - "sin1": { - "process_id": "sin", - "arguments": { - "data": { - "from_node": "pi1" - } - }, - "result": true - } - }, - "returns": 0 - } - ], "links": [ { "rel": "about", @@ -8276,7 +8701,7 @@ }, { "name": "dimension", - "description": "The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Remarks:**\n\n* The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.\n* There could be multiple spatial dimensions such as `x`, `y` or `z`.\n* For multi-spectral imagery there is usually a separate dimension of type `bands` for the bands.", + "description": "The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": "string" } @@ -8292,7 +8717,7 @@ } ], "returns": { - "description": "A data cube with the newly computed values. It has one dimension less than the original data cube, but the remaining dimensions have their resolution and cardinality preserved.", + "description": "A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8360,7 +8785,7 @@ }, { "name": "dimension", - "description": "The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Remarks:**\n\n* The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.\n* There could be multiple spatial dimensions such as `x`, `y` or `z`.\n* For multi-spectral imagery there is usually a separate dimension of type `bands` for the bands.", + "description": "The name of the dimension over which to reduce. 
Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": "string" } @@ -8376,7 +8801,7 @@ } ], "returns": { - "description": "A data cube with the newly computed values. It has one dimension less than the original data cube, but the remaining dimensions have their resolution and cardinality preserved.", + "description": "A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8420,7 +8845,7 @@ } ], "returns": { - "description": "The data cube with the renamed dimension. The old name can not be referenced any longer.", + "description": "A data cube with the same dimensions, but the name of one of the dimensions changes. The old name can not be referred to any longer. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8496,7 +8921,7 @@ } ], "returns": { - "description": "The data cube with the renamed labels. The old labels can not be referenced any longer.", + "description": "The data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that for the given dimension the labels change. The old labels can not be referred to any longer. The number of labels remains the same.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8519,101 +8944,38 @@ "examples": [ { "title": "Rename named labels", - "description": "Renaming the bands from `B1` to `red`, from `B2` to `green` and from `B3` to `blue`. Doesn't rename the band `QA`.", - "process_graph": { - "loadco1": { - "process_id": "load_collection", - "arguments": { - "id": "S2-RGB", - "spatial_extent": null, - "temporal_extent": null, - "bands": [ - "B1", - "B2", - "B3", - "QA" - ] - } + "description": "Renaming the bands from `B1` to `red`, from `B2` to `green` and from `B3` to `blue`.", + "arguments": { + "data": { + "from_parameter": "data" }, - "rename1": { - "process_id": "rename_labels", - "arguments": { - "data": { - "from_node": "loadco1" - }, - "dimension": "bands", - "source": [ - "B1", - "B2", - "B3" - ], - "target": [ - "red", - "green", - "blue" - ] - }, - "result": true - } + "dimension": "bands", + "source": [ + "B1", + "B2", + "B3" + ], + "target": [ + "red", + "green", + "blue" + ] } - }, + } + ], + "links": [ { - "title": "Rename enumerated labels", - "description": "The process replaces the temporal dimension with a new dimension `min_max` with enumerated labels. The first label refers to the minimum values, the second label refers to the maximum values. 
Afterwards, the dimension labels are renamed to `min` and `max` respectively.", - "process_graph": { - "loadco1": { - "process_id": "load_collection", - "arguments": { - "id": "S2-RGB", - "spatial_extent": null, - "temporal_extent": null - } - }, - "apply1": { - "process_id": "apply_dimension", - "arguments": { - "data": { - "from_node": "loadco1" - }, - "process": { - "process_graph": { - "extrem1": { - "process_id": "extrema", - "arguments": { - "data": { - "from_parameter": "data" - } - }, - "result": true - } - } - }, - "dimension": "t", - "target_dimension": "min_max" - } - }, - "rename1": { - "process_id": "rename_labels", - "arguments": { - "data": { - "from_node": "apply1" - }, - "dimension": "bands", - "target": [ - "min", - "max" - ] - }, - "result": true - } - } + "rel": "example", + "type": "application/json", + "href": "https://processes.openeo.org/1.0.0/examples/rename-enumerated-labels.json", + "title": "Rename enumerated labels" } ] }, { "id": "resample_cube_spatial", - "summary": "Resample the spatial dimensions to a target data cube", - "description": "Resamples the spatial dimensions (x,y) from a source data cube to a target data cube and return the results as a new data cube.", + "summary": "Resample the spatial dimensions to match a target data cube", + "description": "Resamples the spatial dimensions (x,y) from a source data cube to align with the corresponding dimensions of the given target data cube. Returns a new data cube with the resampled dimensions.\n\nTo resample a data cube to a specific resolution or projection regardless of an existing target data cube, refer to ``resample_spatial()``.", "categories": [ "cubes", "aggregate & resample" @@ -8660,7 +9022,7 @@ } ], "returns": { - "description": "A data cube with potentially lower spatial resolution and potentially lower cardinality, but the same number of dimensions as the original data cube.", + "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial dimensions.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8668,7 +9030,7 @@ }, "links": [ { - "href": "https://open-eo.github.io/openeo-api/glossary/#aggregation-and-resampling", + "href": "https://openeo.org/documentation/1.0/glossary.html#resample-changing-data-cube-geometry", "rel": "about", "title": "Resampling explained in the openEO glossary" } @@ -8676,8 +9038,8 @@ }, { "id": "resample_cube_temporal", - "summary": "Resample a temporal dimension to a target data cube", - "description": "Resamples a temporal dimension from a source data cube to a target data cube and return the results as a new data cube.\n\nIf the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension.", + "summary": "Resample a temporal dimension to match a target data cube", + "description": "Resamples the given temporal dimension from a source data cube to align with the corresponding dimension of the given target data cube. Returns a new data cube with the resampled dimension.\n\nIf the dimension is not set or is set to `null`, the data cube is expected to have one temporal dimension only.", "categories": [ "cubes", "aggregate & resample" @@ -8731,7 +9093,7 @@ }, { "name": "dimension", - "description": "The name of the temporal dimension to resample, which must exist with this name in both data cubes. 
If the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.\n\n**Note:** The default dimensions a data cube provides are described in the collection's metadata field `cube:dimensions`.", + "description": "The name of the temporal dimension to resample, which must exist with this name in both data cubes. If the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist.", "schema": { "type": [ "string", @@ -8752,7 +9114,7 @@ } ], "returns": { - "description": "A data cube with potentially lower temporal resolution and potentially lower cardinality, but the same number of dimensions as the original data cube.", + "description": "A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension the name and type remain unchanged, but the reference system changes and the labels and resolution may change.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8768,7 +9130,7 @@ }, "links": [ { - "href": "https://open-eo.github.io/openeo-api/glossary/#aggregation-and-resampling", + "href": "https://openeo.org/documentation/1.0/glossary.html#resample-changing-data-cube-geometry", "rel": "about", "title": "Resampling explained in the openEO glossary" } @@ -8777,7 +9139,7 @@ { "id": "resample_spatial", "summary": "Resample and warp the spatial dimensions", - "description": "Resamples the spatial dimensions (x,y) of the data cube to a specified resolution and/or warps the data cube to the target projection. At least `resolution` or `projection` must be specified.\n\nUse ``filter_bbox()`` to set the target spatial extent.", + "description": "Resamples the spatial dimensions (x,y) of the data cube to a specified resolution and/or warps the data cube to the target projection. At least `resolution` or `projection` must be specified.\n\nRelated processes:\n\n* Use ``filter_bbox()`` to set the target spatial extent.\n* To spatially align two data cubes with each other (e.g. for merging), better use the process ``resample_cube_spatial()``.", "categories": [ "cubes", "aggregate & resample" @@ -8824,7 +9186,7 @@ "subtype": "epsg-code", "minimum": 1000, "examples": [ - 7099 + 3857 ] }, { @@ -8886,7 +9248,7 @@ } ], "returns": { - "description": "A raster data cube with values warped onto the new projection.", + "description": "A raster data cube with values warped onto the new projection. It has the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-spatial or vertical spatial dimensions. 
For the horizontal spatial dimensions the name and type remain unchanged, but reference system, labels and resolution may change depending on the given parameters.", "schema": { "type": "object", "subtype": "raster-cube" @@ -8913,7 +9275,7 @@ { "id": "round", "summary": "Round to a specified precision", - "description": "Rounds a real number `x` to specified precision `p`.\n\nIf the fractional part of `x` is halfway between two integers, one of which is even and the other odd, then the even number is returned.\nThis behaviour follows [IEEE Standard 754](https://ieeexplore.ieee.org/document/8766229). This kind of rounding is also called \"rounding to nearest\" or \"banker's rounding\". It minimizes rounding errors that result from consistently rounding a midpoint value in a single direction.\n\nThe no-data value `null` is passed through and therefore gets propagated.", + "description": "Rounds a real number `x` to specified precision `p`.\n\nIf the fractional part of `x` is halfway between two integers, one of which is even and the other odd, then the even number is returned.\nThis behaviour follows [IEEE Standard 754](https://ieeexplore.ieee.org/document/8766229). This kind of rounding is also called \"round to nearest (even)\" or \"banker's rounding\". It minimizes rounding errors that result from consistently rounding a midpoint value in a single direction.\n\nThe no-data value `null` is passed through and therefore gets propagated.", "categories": [ "math > rounding" ], @@ -9004,8 +9366,9 @@ { "id": "run_udf_externally", "summary": "Run an externally hosted UDF container", - "description": "Runs a compatible UDF container that is either externally hosted by a service provider or running on a local machine of the user. The UDF container must follow the [openEO UDF specification](https://open-eo.github.io/openeo-udf/).\n\nThe referenced UDF service can be executed in several processes such as ``aggregate_spatial()``, ``apply()``, ``apply_dimension()`` and ``reduce_dimension()``. In this case an array is passed instead of a raster data cube. The user must ensure that the data is properly passed as an array so that the UDF can make sense of it.", + "description": "Runs a compatible UDF container that is either externally hosted by a service provider or running on a local machine of the user. The UDF container must follow the [openEO UDF specification](https://openeo.org/documentation/1.0/udfs.html).\n\nThe referenced UDF service can be executed in several processes such as ``aggregate_spatial()``, ``apply()``, ``apply_dimension()`` and ``reduce_dimension()``. In this case an array is passed instead of a raster data cube. The user must ensure that the data is properly passed as an array so that the UDF can make sense of it.", "categories": [ + "cubes", "import", "udf" ], @@ -9054,7 +9417,7 @@ } ], "returns": { - "description": "The data processed by the UDF service. Returns a raster data cube if a raster data cube was passed for `data`. If an array was passed for `data`, the returned value is defined by the context and is exactly what the UDF returned.", + "description": "The data processed by the UDF service.\n\n* Returns a raster data cube, if a raster data cube is passed for `data`. 
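A sketch of the "round half to even" behaviour described above at precision `p`; JavaScript's `Math.round` rounds midpoints towards positive infinity, so the halfway case is handled explicitly (illustrative only, with the usual floating-point caveats):

```js
// Banker's rounding at precision p: exact midpoints go to the even neighbour.
function round(x, p = 0) {
  if (x === null) return null; // no-data is passed through
  const factor = Math.pow(10, p);
  const scaled = x * factor;
  const floor = Math.floor(scaled);
  const isMidpoint = Math.abs(scaled - floor - 0.5) < Number.EPSILON;
  const rounded = isMidpoint
    ? (floor % 2 === 0 ? floor : floor + 1) // pick the even neighbour
    : Math.round(scaled);
  return rounded / factor;
}

round(2.5);           // 2
round(3.5);           // 4
round(0.25, 1);       // 0.2
round(1234.5678, -2); // 1200
```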
Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF.\n* If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns.", "schema": [ { "title": "Raster data cube", @@ -9070,7 +9433,7 @@ "links": [ { "rel": "about", - "href": "https://open-eo.github.io/openeo-udf/", + "href": "https://openeo.org/documentation/1.0/udfs.html", "title": "openEO UDF specification" }, { @@ -9085,6 +9448,7 @@ "summary": "Run an UDF", "description": "Runs an UDF in one of the supported runtime environments.\n\nThe process can either:\n\n1. load and run a locally stored UDF from a file in the workspace of the authenticated user. The path to the UDF file must be relative to the root directory of the user's workspace.\n2. fetch and run a remotely stored and published UDF by absolute URI, for example from [openEO Hub](https://hub.openeo.org)).\n3. run the source code specified inline as string.\n\nThe loaded UDF can be executed in several processes such as ``aggregate_spatial()``, ``apply()``, ``apply_dimension()`` and ``reduce_dimension()``. In this case an array is passed instead of a raster data cube. The user must ensure that the data is properly passed as an array so that the UDF can make sense of it.", "categories": [ + "cubes", "import", "udf" ], @@ -9174,7 +9538,7 @@ } }, "returns": { - "description": "The data processed by the UDF. Returns a raster data cube if a raster data cube was passed for `data`. If an array was passed for `data`, the returned value is defined by the context and is exactly what the UDF returned.", + "description": "The data processed by the UDF.\n\n* Returns a raster data cube, if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF.\n* If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns.", "schema": [ { "title": "Raster data cube", @@ -9213,7 +9577,7 @@ }, { "name": "format", - "description": "The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. This parameter is *case insensitive*.", + "description": "The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*.", "schema": { "type": "string", "subtype": "output-format" @@ -9221,7 +9585,7 @@ }, { "name": "options", - "description": "The file format options to be used to create the file(s). Must correspond to the options that the server reports as supported options for the chosen `format`. The option names and valid values usually correspond to the GDAL/OGR format options.", + "description": "The file format parameters to be used to create the file(s). Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options.", "schema": { "type": "object", "subtype": "output-format-options" @@ -9236,6 +9600,11 @@ "type": "boolean" } }, + "exceptions": { + "FormatUnsuitable": { + "message": "Data can't be transformed into the requested output format." 
@@ -9213,7 +9577,7 @@
         },
         {
             "name": "format",
-            "description": "The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. This parameter is *case insensitive*.",
+            "description": "The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*.",
             "schema": {
                 "type": "string",
                 "subtype": "output-format"
@@ -9221,7 +9585,7 @@
         },
         {
             "name": "options",
-            "description": "The file format options to be used to create the file(s). Must correspond to the options that the server reports as supported options for the chosen `format`. The option names and valid values usually correspond to the GDAL/OGR format options.",
+            "description": "The file format parameters to be used to create the file(s). Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options.",
             "schema": {
                 "type": "object",
                 "subtype": "output-format-options"
@@ -9236,6 +9600,11 @@
                 "type": "boolean"
             }
         },
+        "exceptions": {
+            "FormatUnsuitable": {
+                "message": "Data can't be transformed into the requested output format."
+            }
+        },
         "links": [
             {
                 "rel": "about",
@@ -9252,7 +9621,7 @@
     {
         "id": "sd",
         "summary": "Standard deviation",
-        "description": "Computes the sample standard deviation, which quantifies the amount of variation of an array of numbers. It is defined to be the square root of the corresponding variance (see ``variance()``).\n\nA low standard deviation indicates that the values tend to be close to the expected value, while a high standard deviation indicates that the values are spread out over a wider range.",
+        "description": "Computes the sample standard deviation, which quantifies the amount of variation of an array of numbers. It is defined to be the square root of the corresponding variance (see ``variance()``).\n\nA low standard deviation indicates that the values tend to be close to the expected value, while a high standard deviation indicates that the values are spread out over a wider range.\n\nAn array without non-`null` elements always resolves with `null`.",
         "categories": [
             "math",
             "reducer"
@@ -9260,7 +9629,7 @@
         "parameters": [
             {
                 "name": "data",
-                "description": "An array of numbers. An empty array resolves always with `null`.",
+                "description": "An array of numbers.",
                 "schema": {
                     "type": "array",
                     "items": {
@@ -10460,8 +10829,8 @@
     },
     {
         "id": "trim_cube",
-        "summary": "Remove slices with no-data values",
-        "description": "Removes slices solely containing no-data values. If the dimension is irregular categorical then slices in the middle can be removed.",
+        "summary": "Remove dimension labels with no-data values",
+        "description": "Removes dimension labels solely containing no-data values. If the dimension is irregular categorical, then dimension labels in the middle can be removed.",
         "categories": [
             "cubes"
         ],
@@ -10476,7 +10845,7 @@
             }
         ],
         "returns": {
-            "description": "A trimmed raster data cube.",
+            "description": "A trimmed raster data cube with the same dimensions. The dimension properties (name, type, reference system and resolution) remain unchanged. The number of dimension labels may decrease.",
             "schema": {
                 "type": "object",
                 "subtype": "raster-cube"
@@ -10486,7 +10855,7 @@
     {
         "id": "variance",
         "summary": "Variance",
-        "description": "Computes the sample variance of an array of numbers by calculating the square of the standard deviation (see ``sd()``). It is defined to be the expectation of the squared deviation of a random variable from its expected value. Basically, it measures how far the numbers in the array are spread out from their average value.",
+        "description": "Computes the sample variance of an array of numbers by calculating the square of the standard deviation (see ``sd()``). It is defined to be the expectation of the squared deviation of a random variable from its expected value. Basically, it measures how far the numbers in the array are spread out from their average value.\n\nAn array without non-`null` elements always resolves with `null`.",
         "categories": [
             "math",
             "reducer"
@@ -10494,7 +10863,7 @@
         "parameters": [
             {
                 "name": "data",
-                "description": "An array of numbers. An empty array resolves always with `null`.",
+                "description": "An array of numbers.",
                 "schema": {
                     "type": "array",
                     "items": {
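A worked sketch of the two reducer definitions above (illustrative only; `null` is the no-data value, and the fewer-than-two-elements guard is a choice of this sketch):

// Sample variance and its square root, the sample standard deviation.
function variance(data) {
    let nums = data.filter(x => x !== null);
    if (nums.length < 2) return null; // no (or a single) non-null element
    let mean = nums.reduce((s, x) => s + x, 0) / nums.length;
    return nums.reduce((s, x) => s + Math.pow(x - mean, 2), 0) / (nums.length - 1);
}
function sd(data) {
    let v = variance(data);
    return v === null ? null : Math.sqrt(v);
}

variance([-1, 1, 3]); // 4 (mean 1, squared deviations 4 + 0 + 4, divided by n - 1 = 2)
sd([-1, 1, 3]);       // 2
sd([null, null]);     // null (no non-null elements)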
diff --git a/tests/assets/undefined_param.json b/tests/assets/undefined_param.json
new file mode 100644
index 0000000..9e07957
--- /dev/null
+++ b/tests/assets/undefined_param.json
@@ -0,0 +1,13 @@
+{
+	"process_graph":{
+		"dc":{
+			"process_id":"load_collection",
+			"arguments":{
+				"id":{"from_parameter": "cid"},
+				"spatial_extent":null,
+				"temporal_extent":null
+			},
+			"result": true
+		}
+	}
+}
\ No newline at end of file
diff --git a/tests/jsonschema.test.js b/tests/jsonschema.test.js
index 0ef536c..6d4b447 100644
--- a/tests/jsonschema.test.js
+++ b/tests/jsonschema.test.js
@@ -1,6 +1,8 @@
 const JsonSchemaValidator = require('../src/jsonschema');
+const ProcessRegistry = require('../src/registry');
+const Utils = require('../src/utils');
 const epsg = require('epsg-index/all.json');
-const Utils = require('@openeo/js-commons/src/utils.js');
+const PROCESSES = require('./assets/processes.json');
 
 process.on('unhandledRejection', r => console.log(r));
 
@@ -56,6 +58,10 @@ describe('JSON Schema Validator Tests', () => {
 		await expectError(v, -4326, epsgSchema);
 	});
 	test('epsg-code with list', async () => {
+		expect(Utils.size(v.epsgCodes)).toBe(0);
+		v.setEpsgCodes(null);
+		expect(Utils.size(v.epsgCodes)).toBe(0);
+
 		v.setEpsgCodes(Object.keys(epsg));
 		await expectSuccess(v, 2000, epsgSchema);
 		await expectSuccess(v, 3857, epsgSchema);
@@ -160,6 +166,17 @@
 		"type": "string",
 		"subtype": "output-format"
 	};
+
+	test('input/output-format', async () => {
+		expect(v.fileFormats).toHaveProperty("input");
+		expect(v.fileFormats).toHaveProperty("output");
+		expect(Utils.size(v.fileFormats.input)).toBe(0);
+		expect(Utils.size(v.fileFormats.output)).toBe(0);
+		v.setFileFormats(null);
+		expect(Utils.size(v.fileFormats.input)).toBe(0);
+		expect(Utils.size(v.fileFormats.output)).toBe(0);
+	});
+
 	test('output-format', async () => {
 		// No file formats set => succeed always
 		await expectSuccess(v, "GTiff", outputFormatSchema);
@@ -190,17 +207,56 @@
 		await expectError(v, "", inputFormatSchema);
 	});
 
+	var wkt2Value = 'GEOGCRS["WGS 84",DATUM["World Geodetic System 1984",ELLIPSOID["WGS 84",6378137,298.257223563,LENGTHUNIT["metre",1]]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]],CS[ellipsoidal,2],AXIS["geodetic latitude (Lat)",north,ORDER[1],ANGLEUNIT["degree",0.0174532925199433]],AXIS["geodetic longitude (Lon)",east,ORDER[2],ANGLEUNIT["degree",0.0174532925199433]],ID["EPSG",4326]]';
+	var wkt1Value = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]]';
+	var projValue = "+proj=moll +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs";
 	var projSchema = {
 		"type": "string",
 		"subtype": "proj-definition"
 	};
 	test('proj-definition', async () => {
 		await expectSuccess(v, "+proj=utm +zone=32 +datum=WGS84", projSchema);
-		await expectSuccess(v, "+proj=moll +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs", projSchema);
+		await expectSuccess(v, projValue, projSchema);
+		await expectSuccess(v, projValue.toUpperCase(), projSchema);
+		await expectError(v, wkt1Value, projSchema);
+		await expectError(v, wkt2Value, projSchema);
 		await expectError(v, "EPSG:32632", projSchema);
 		await expectError(v, "", projSchema);
 	});
 
+	var wkt2Schema = {
+		"type": "string",
+		"subtype": "wkt2-definition"
+	};
+	test('wkt2-definition', async () => {
+		await expectSuccess(v, wkt2Value, wkt2Schema);
+		await expectSuccess(v, wkt2Value.toLowerCase(), wkt2Schema);
+		await expectError(v, wkt1Value, wkt2Schema);
+		await expectError(v, projValue, wkt2Schema);
+		await expectError(v, "EPSG:32632", wkt2Schema);
+		await expectError(v, "", wkt2Schema);
+	});
+
+	var pgSchema = {
+		"type": "object",
+		"subtype": "process-graph"
+	};
+	const ProcessGraphEVI = require('./assets/evi.json');
+	const invalidPg = {
+		process_graph: {
+			"123": {
+				process_id: "absolute",
+				arguments: {}
+			}
+		}
+	};
+	test('process-graph', async () => {
+		await expectSuccess(v, ProcessGraphEVI, pgSchema);
+		await expectError(v, invalidPg, pgSchema);
+		await expectError(v, {}, pgSchema);
+	});
+
 	var temporalIntervalSchema = {
 		"type": "array",
 		"subtype": "temporal-interval"
diff --git a/tests/node.test.js b/tests/node.test.js
index 2b33417..810892a 100644
--- a/tests/node.test.js
+++ b/tests/node.test.js
@@ -1,60 +1,150 @@
 const Node = require('../src/node');
+const ProcessGraph = require('../src/processgraph');
 
-var contextObj = [
-	{from_parameter: "foo"},
-	{from_node: "bar"}
-];
+var from_foo = { from_parameter: "foo" };
+var fooDefault = "foobar";
+var from_bar = { from_node: "bar" };
+var from_x = { from_parameter: "x" };
+
+var subProcess = {
+	process_graph: {
+		absolute: {
+			process_id: "absolute",
+			arguments: {
+				x: from_x
+			},
+			result: true
+		}
+	}
+};
+
+var contextRefs = [from_foo, from_bar];
+var contextAdditionals = [1, 2];
+var contextObj = contextRefs.concat(contextAdditionals);
+var contextResult = [fooDefault, undefined].concat(contextAdditionals);
 
 var nodeObj = {
 	process_id: "apply",
 	arguments: {
-		process: {
-			process_graph: {
-				absolute: {
-					process_id: "absolute",
-					arguments: {
-						x: {from_parameter: "x"}
-					}
-				}
-			}
-		},
-		context: contextObj.concat([1,2])
+		process: subProcess,
+		context: contextObj,
+		parameter: from_foo,
+		result: from_bar,
+		number: 123
 	},
 	description: "Test",
 	result: true
};
 
+var barNodeObj = {
+	process_id: "bar",
+	arguments: {}
+};
+
+var process = {
+	parameters: [
+		{
+			name: "foo",
+			default: fooDefault,
+			optional: true
+		}
+	],
+	process_graph: {
+		"bar": barNodeObj,
+		"example": nodeObj
+	}
+};
+
 describe('Node tests', () => {
 
-	var node;
-	test('Init', () => {
-		node = new Node(nodeObj, "123", null);
-		expect(node instanceof Node).toBe(true);
-		expect(node.process_id).toBe("apply");
-		expect(node.description).toBe("Test");
-		expect(node.isResultNode).toBeTruthy();
-		expect(node.getProcessGraph()).toBeNull();
-		expect(node.getParent()).toBeNull();
-		expect(node.isStartNode()).toBeTruthy();
-		expect(node.getPreviousNodes()).toEqual([]);
-		expect(node.getNextNodes()).toEqual([]);
-	});
-	test('Errors', () => {
-		expect(() => new Node(null, 123)).toThrow();
-		expect(() => new Node(null, "123")).toThrow();
-		expect(() => new Node({}, "123")).toThrow();
-	});
-	test('General argument handling', () => {
-		expect(node.getArgumentNames()).toEqual(["process", "context"]);
-		expect(node.hasArgument("process")).toBeTruthy();
-		expect(node.hasArgument("context")).toBeTruthy();
-		expect(node.hasArgument("data")).toBeFalsy();
-		expect(node.getArgumentType("process")).toBe("callback");
-		expect(node.getArgumentType("context")).toBe("array");
-		// getRawArgument(Value)
-	});
-	test('Refs', () => {
-		expect(node.getArgumentRefs("process")).toEqual([]);
-		expect(node.getArgumentRefs("context")).toEqual(contextObj);
-		expect(node.getRefs()).toEqual(contextObj);
-	});
+	test('Errors', () => {
+		expect(() => new Node(null, 123)).toThrow();
+		expect(() => new Node(null, "123")).toThrow();
+		expect(() => new Node({}, "123")).toThrow();
+	});
+
+	var node;
+	var pg;
+	test('Parse', () => {
+		pg = new ProcessGraph(process);
+		pg.fillUndefinedParameters(false);
+		expect(() => pg.parse()).not.toThrow();
+		node = pg.getNode("example");
+	});
+
+	test('Basics', () => {
+		expect(node instanceof Node).toBe(true);
+		expect(node.process_id).toBe("apply");
+		expect(node.description).toBe("Test");
+		expect(node.isResultNode).toBeTruthy();
+		expect(node.getProcessGraph()).toBe(pg);
+		expect(node.getParent()).toBeNull();
+		expect(node.isStartNode()).toBeFalsy();
+		expect(node.getPreviousNodes()).toEqual([pg.getNode("bar")]);
+		expect(node.getNextNodes()).toEqual([]);
+		expect(node.toJSON()).toEqual(nodeObj);
+	});
+
+	test('Argument handling', () => {
+		expect(node.getArgumentNames()).toEqual(["process", "context", "parameter", "result", "number"]);
+
+		expect(node.hasArgument("process")).toBeTruthy();
+		expect(node.hasArgument("context")).toBeTruthy();
+		expect(node.hasArgument("parameter")).toBeTruthy();
+		expect(node.hasArgument("result")).toBeTruthy();
+		expect(node.hasArgument("number")).toBeTruthy();
+		expect(node.hasArgument("data")).toBeFalsy();
+
+		expect(node.getArgumentType("process")).toEqual("callback");
+		expect(node.getArgumentType("context")).toEqual("array");
+		expect(node.getArgumentType("parameter")).toEqual("parameter");
+		expect(node.getArgumentType("result")).toEqual("result");
+		expect(node.getArgumentType("number")).toEqual("number");
+		expect(node.getArgumentType("data")).toEqual("undefined");
+
+		expect(node.getArgument("process") instanceof ProcessGraph).toBeTruthy();
+		expect(node.getArgument("context")).toEqual(contextResult);
+		expect(node.getArgument("parameter")).toEqual(fooDefault);
+		expect(node.getArgument("result")).toBeUndefined();
+		expect(node.getArgument("number")).toEqual(123);
+		expect(node.getArgument("data")).toBeUndefined();
+	});
+
+	test('Raw Argument handling', () => {
+		expect(node.getRawArgument("process") instanceof ProcessGraph).toBeTruthy();
+		expect(node.getRawArgument("context")).toEqual(contextObj);
+		expect(node.getRawArgument("parameter")).toEqual(from_foo);
+		expect(node.getRawArgument("result")).toEqual(from_bar);
+		expect(node.getRawArgument("number")).toEqual(123);
+		expect(node.getRawArgument("data")).toBeUndefined();
+
+		expect(node.getRawArgumentValue("process") instanceof ProcessGraph).toBeTruthy();
+		expect(node.getRawArgumentValue("context")).toEqual(contextObj);
+		expect(node.getRawArgumentValue("parameter")).toEqual(from_foo.from_parameter);
+		expect(node.getRawArgumentValue("result")).toEqual(from_bar.from_node);
+		expect(node.getRawArgumentValue("number")).toEqual(123);
+		expect(node.getRawArgumentValue("data")).toBeUndefined();
+	});
+
+	test('Refs', () => {
+		expect(node.getArgumentRefs("process")).toEqual([]);
+		expect(node.getArgumentRefs("context")).toEqual(contextRefs);
+		expect(node.getRefs()).toEqual(contextRefs);
+	});
+
+	test('Description', () => {
+		node.setDescription("");
+		expect(node.getDescription()).toEqual("");
+		node.setDescription("Foo Bar");
+		expect(node.getDescription()).toEqual("Foo Bar");
+		node.setDescription("");
+		node.setDescription({});
+		expect(node.getDescription()).toBeNull();
+		node.setDescription("");
+		node.setDescription(123);
+		expect(node.getDescription()).toBeNull();
+		node.setDescription("");
+		node.setDescription(null);
+		expect(node.getDescription()).toBeNull();
+	});
 });
\ No newline at end of file
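A compact view of the resolved-versus-raw behaviour these tests pin down (illustrative sketch reusing the fixtures above, not additional test code):

// With the fixtures above: "parameter" is {from_parameter: "foo"} and the
// process declares parameter "foo" with default "foobar".
let example = pg.getNode("example");
example.getArgumentType("parameter");     // "parameter"
example.getRawArgument("parameter");      // { from_parameter: "foo" } (unresolved)
example.getRawArgumentValue("parameter"); // "foo" (just the referenced name)
example.getArgument("parameter");         // "foobar" (resolved to the declared default)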
diff --git a/tests/processgraph.test.js b/tests/processgraph.test.js
index a8aa0cc..5b14245 100644
--- a/tests/processgraph.test.js
+++ b/tests/processgraph.test.js
@@ -1,69 +1,260 @@
-const ProcessGraphEVI = require('./assets/evi.json');
 const PROCESSES = require('./assets/processes.json');
 const ProcessGraph = require('../src/processgraph');
 const ProcessRegistry = require('../src/registry');
+const BaseProcess = require('../src/process');
 
 process.on('unhandledRejection', r => console.log(r));
 
+class Queue {
+	constructor() {
+		this.list = [];
+	}
+	push(e) {
+		this.list.push(e);
+	}
+	clear() {
+		this.list = [];
+	}
+	all() {
+		return this.list;
+	}
+}
+
+class ProcessImpl extends BaseProcess {
+	constructor(spec, queue) {
+		super(spec);
+		this.queue = queue;
+	}
+	async execute(node) {
+		this.queue.push(node.id);
+
+		// Execute callbacks
+		let args = node.getArgumentNames().filter(arg => node.getArgumentType(arg) === 'callback');
+		for(var name of args) {
+			let callback = node.getArgument(name);
+			await callback.execute(callback.getCallbackParameters());
+		}
+
+		return node.id;
+	}
+}
+
+var registry;
 describe('Process Graph Tests', () => {
 
-	var registry;
+	const q = new Queue();
 	beforeAll(() => {
-		registry = new ProcessRegistry(PROCESSES);
+		registry = new ProcessRegistry(PROCESSES.map(p => new ProcessImpl(p, q)));
 	});
 
-	test('Parser & Validator > Empty process throws by default', async () => {
+	test('Parser > Empty process throws by default', async () => {
+		var pg = new ProcessGraph(null, registry);
+		expect(() => pg.parse()).toThrow();
 		var pg = new ProcessGraph({}, registry);
 		expect(() => pg.parse()).toThrow();
 	});
 
-	test('Parser & Validator > Empty process allowed', async () => {
+	test('Parser > Empty process allowed', async () => {
 		var pg = new ProcessGraph({}, registry);
 		pg.allowEmpty();
 		expect(() => pg.parse()).not.toThrow();
 	});
 
-	test('Parser & Validator > Invalid process graph throws', async () => {
+	test('Parser > Invalid process graph throws', async () => {
 		var pg = new ProcessGraph({process_graph: null}, registry);
 		expect(() => pg.parse()).toThrow();
 	});
 
-	test('Parser & Validator > Empty process graph fails', async () => {
-		try {
-			var process = {
-				parameters: [],
-				process_graph: {}
-			};
-			var pg = new ProcessGraph(process, registry);
-			var errors = await pg.validate(false);
-			expect(errors.count()).toBeGreaterThan(0);
-			expect(pg.isValid()).toBe(false);
-			expect(pg.getErrors()).toStrictEqual(errors);
-			expect(pg.toJSON()).toStrictEqual(process);
-		} catch(e) {
-			console.log(e);
-			expect(e).toBeNull();
-		}
+	test('Parser > Empty process graph throws', async () => {
+		var pg = new ProcessGraph({process_graph: {}}, registry);
+		expect(() => pg.parse()).toThrow();
 	});
 
-	test('Parser & Validator > parse EVI without registry', async () => {
+	test('Parser > Multiple result nodes throw', async () => {
+		let absNode = {
+			process_id: "absolute",
+			arguments: {
+				x: -1
+			},
+			result: true
+		};
+		var pg = new ProcessGraph({
+			process_graph: {
+				"abs1": absNode,
+				"abs2": absNode
+			}
+		}, registry);
+		expect(() => pg.parse()).toThrow();
+	});
+
+	test('Parser > No result node throws', async () => {
+		let absNode = {
+			process_id: "absolute",
+			arguments: {
+				x: -1
+			}
+		};
+		var pg = new ProcessGraph({
+			process_graph: {
+				"abs1": absNode,
+				"abs2": absNode
+			}
+		}, registry);
+		expect(() => pg.parse()).toThrow();
+	});
+
+	test('Parser > Throw on circular refs', async () => {
+		var pg = new ProcessGraph({
+			process_graph: {
+				"abs1": {
+					process_id: "absolute",
+					arguments: {
+						x: {
+							from_node: "abs2"
+						}
+					}
+				},
+				"abs2": {
+					process_id: "absolute",
+					arguments: {
+						x: {
+							from_node: "abs1"
+						}
+					},
+					result: true
+				}
+			}
+		}, registry);
+		expect(() => pg.parse()).toThrow();
+	});
+
+	const ProcessGraphEVI = require('./assets/evi.json');
+	test('Parser > parse EVI without registry', async () => {
 		var pg = new ProcessGraph(ProcessGraphEVI);
 		expect(() => pg.parse()).not.toThrow();
 		expect(pg.getStartNodeIds()).toEqual(["dc"]);
 	});
 
-	test('Parser & Validator > validate EVI with registry', async () => {
+	test('Validator > validate EVI with registry', async () => {
 		var pg = new ProcessGraph(ProcessGraphEVI, registry);
 		var errors = await pg.validate(false);
-		if (errors.count() > 0) {
-			console.log(errors.getMessage());
-		}
-		expect(errors.count()).toBe(0);
+		expect(errors.getAll()).toEqual([]);
 		expect(pg.isValid()).toBe(true);
 		expect(pg.getErrors()).toStrictEqual(errors);
 		expect(pg.getStartNodeIds()).toEqual(["dc"]);
 		expect(pg.toJSON()).toStrictEqual(ProcessGraphEVI);
+		expect(pg.getNodeCount()).toBe(4);
+	});
+
+	const ProcessGraphInvalidArgs = require('./assets/invalid_args.json');
+	test('Validator > throw on invalid argument in object', async () => {
+		await validateFailsWith(ProcessGraphInvalidArgs, "The argument 'spatial_extent' in process 'load_collection' is invalid");
+	});
+
+	const ProcessGraphParamInObj = require('./assets/param_in_obj_arg.json');
+	test('Validator > does not throw on parameter in object', async () => {
+		await validateSucceeds(ProcessGraphParamInObj);
+	});
+
+	const ProcessGraphUndefinedParam = require('./assets/undefined_param.json');
+	test('Validator > do NOT allow undefined param', async () => {
+		var pg = new ProcessGraph(ProcessGraphUndefinedParam, registry);
+		pg.allowUndefinedParameters(false);
+		await validateFailsWith(pg, "Invalid parameter 'cid' requested in the process");
 	});
+	test('Validator > allow undefined param', async () => {
+		var pg = new ProcessGraph(ProcessGraphUndefinedParam, registry);
+		pg.allowUndefinedParameters(true);
+		await validateSucceeds(pg);
+	});
+	test('Validator > Fill parameters for undefined parameter refs', async () => {
+		var pg = new ProcessGraph(ProcessGraphUndefinedParam, registry);
+		pg.fillUndefinedParameters();
+		await pg.validate();
+		let param = pg.getParameter('cid');
+		expect(param).not.toBeNull();
+		expect(param).toHaveProperty('name');
+		expect(param).toHaveProperty('description');
+		expect(param).toHaveProperty('schema');
+		expect(param.name).toBe('cid');
+	});
+
+	test('Validator > Argument unsupported throws', async () => {
+		let pg = {
+			process_graph: {
+				"abs1": {
+					process_id: "absolute",
+					arguments: {
+						z: -1
+					},
+					result: true
+				}
+			}
+		};
+		await validateFailsWith(pg, "Process 'absolute' does not support the following arguments: z");
+	});
+
+	test('Validator > Missing argument throws', async () => {
+		let pg = {
+			process_graph: {
+				"abs1": {
+					process_id: "absolute",
+					arguments: {},
+					result: true
+				}
+			}
+		};
+		await validateFailsWith(pg, "Process 'absolute' requires argument 'x'.");
+	});
+
+	test('Validator > Process missing', async () => {
+		let pg = {
+			process_graph: {
+				"foobar": {
+					process_id: "foo",
+					arguments: {},
+					result: true
+				}
+			}
+		};
+		await validateFailsWith(pg, "Process 'foo' is not supported.");
+	});
+
+	test('Executor > execute EVI with registry', async () => {
+		var pg = new ProcessGraph(ProcessGraphEVI, registry);
+		q.clear();
+		var resultNode = await pg.execute();
+		expect(pg.isValid()).toBe(true);
+		expect(pg.getErrors().count()).toEqual(0);
+		expect(resultNode.getResult()).toEqual(resultNode.id);
+		expect(q.all()).toEqual(["dc", "evi", "blue", "nir", "red", "p2", "p1", "sub", "sum", "div", "p3", "mintime", "min", "save"]);
+	});
+
+});
+
+async function validateFailsWith(pg, msg) {
+	if (!(pg instanceof ProcessGraph)) {
+		pg = new ProcessGraph(pg, registry);
+	}
+	try {
+		await pg.validate();
+	}
+	catch (error) {
+		expect(error.message).toContain(msg);
+	}
+	expect(pg.isValid()).toBeFalsy();
+}
-	});
\ No newline at end of file
+
+async function validateSucceeds(pg) {
+	if (!(pg instanceof ProcessGraph)) {
+		pg = new ProcessGraph(pg, registry);
+	}
+	try {
+		await pg.validate();
+	}
+	catch (error) {
+		expect(error).toBeUndefined();
+	}
+	expect(pg.isValid()).toBeTruthy();
+}
\ No newline at end of file
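Read together, the tests above sketch the intended wiring of the library: implementations subclass BaseProcess and implement execute(node), a ProcessRegistry holds them, and a ProcessGraph is parsed, validated and executed against that registry. A minimal sketch under those assumptions (the EchoProcess class and run() helper are invented for illustration):

const { BaseProcess, ProcessGraph, ProcessRegistry } = require('../src/main');
const PROCESSES = require('./assets/processes.json');

// Hypothetical no-op implementation: resolves every node with its own id.
class EchoProcess extends BaseProcess {
    async execute(node) {
        return node.id;
    }
}

async function run(processJson) {
    let registry = new ProcessRegistry(PROCESSES.map(spec => new EchoProcess(spec)));
    let pg = new ProcessGraph(processJson, registry);
    pg.parse(); // throws a ProcessGraphError for structural problems
    let errors = await pg.validate(false); // pass false to collect errors instead of throwing
    if (errors.count() > 0) {
        throw new Error(errors.getMessage());
    }
    return await pg.execute(); // resolves with the result node
}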
diff --git a/tests/registry.test.js b/tests/registry.test.js
index 77fd7fb..356477f 100644
--- a/tests/registry.test.js
+++ b/tests/registry.test.js
@@ -13,11 +13,8 @@ describe('Registry Tests', () => {
 		expect(registry.count()).toBe(PROCESSES.length);
 	});
 
-	var processName = "absolute";
 	test('Get process', () => {
-		var absolute = registry.get(processName);
-		expect(absolute).toBeInstanceOf(BaseProcess);
-		expect(absolute.id).toBe(processName);
+		checkAbsolute(registry);
 
 		var x = registry.get("unknown-process");
 		expect(x).toBeNull();
@@ -29,4 +26,32 @@
 		expect(schemas.length).toBe(PROCESSES.length);
 	});
 
-});
\ No newline at end of file
+	test('Get all specifications', () => {
+		expect(registry.all()).toEqual(PROCESSES.map(p => new BaseProcess(p)));
+	});
+
+	test('Add specifications individually', () => {
+		expect(() => registry.add(null)).toThrowError();
+		expect(() => registry.add({description: "Test"})).toThrowError();
+
+		let registry2 = new ProcessRegistry();
+		class absolute {
+			constructor() {
+				this.spec = registry.get("absolute");
+			}
+			toJSON() {
+				return this.spec;
+			}
+		}
+		registry2.add(new absolute());
+		checkAbsolute(registry2);
+	});
+
+});
+
+function checkAbsolute(reg) {
+	var processName = "absolute";
+	var absolute = reg.get(processName);
+	expect(absolute).toBeInstanceOf(BaseProcess);
+	expect(absolute.id).toBe(processName);
+}
\ No newline at end of file
diff --git a/tests/utils.test.js b/tests/utils.test.js
new file mode 100644
index 0000000..d1b6931
--- /dev/null
+++ b/tests/utils.test.js
@@ -0,0 +1,104 @@
+const Utils = require('../src/utils');
+
+describe('Utils Tests', () => {
+
+	let param1 = {from_parameter: "123"};
+	let result1 = {from_node: "123"};
+
+	test('getType', () => {
+		expect(Utils.getType(null)).toBe('null');
+		expect(Utils.getType(123)).toBe('number');
+		expect(Utils.getType(123.45)).toBe('number');
+		class X {}
+		expect(Utils.getType(new X())).toBe('object');
+		expect(Utils.getType({})).toBe('object');
+		expect(Utils.getType(param1)).toBe('parameter');
+		expect(Utils.getType(result1)).toBe('result');
+		expect(Utils.getType({process_graph: {}})).toBe('callback');
+		expect(Utils.getType([])).toBe('array');
+		expect(Utils.getType(true)).toBe('boolean');
+		expect(Utils.getType("123")).toBe('string');
+		expect(Utils.getType(() => {})).toBe('function');
+		expect(Utils.getType(Utils)).toBe('function');
+		expect(Utils.getType(undefined)).toBe('undefined');
+	});
+
+	let paramDeep1 = [{deep: param1}, param1, {deep: {deeper: param1 }}];
+	let noRef = [{foo: "bar"}, {hello: 123}];
+	let from_x = {from_parameter: "x"};
+	let subProcess = {
+		process_graph: {
+			absolute: {
+				process_id: "absolute",
+				arguments: {
+					x: from_x
+				},
+				result: true
+			}
+		}
+	};
+	let pg = {
+		process_graph: {
+			example: {
+				process_id: "apply",
+				arguments: {
+					process: subProcess,
+					context: result1
+				},
+				result: true
+			}
+		}
+	};
+	let pg2 = {
+		process_graph: {
+			example: {
+				process_id: "apply",
+				arguments: {
+					process: subProcess
+				},
+				result: true
+			}
+		}
+	};
+	let deepRefs = [result1, from_x];
+	let shallowRefs = [result1];
+	test('getRefs', () => {
+		compareRefs(Utils.getRefs(param1), [param1]);
+		compareRefs(Utils.getRefs(paramDeep1), [param1]);
+		compareRefs(Utils.getRefs(result1), [result1]);
+		compareRefs(Utils.getRefs(noRef), []);
+		compareRefs(Utils.getRefs(null), []);
+		compareRefs(Utils.getRefs("from_parameter"), []);
+		compareRefs(Utils.getRefs(pg, false), shallowRefs);
+		compareRefs(Utils.getRefs(pg, true), deepRefs);
+		compareRefs(Utils.getRefs(pg2, false), []);
+	});
+
+	test('containsRef', () => {
+		expect(Utils.containsRef(param1)).toEqual(true);
+		expect(Utils.containsRef(paramDeep1)).toEqual(true);
+		expect(Utils.containsRef(result1)).toEqual(true);
+		expect(Utils.containsRef(noRef)).toEqual(false);
+		expect(Utils.containsRef(null)).toEqual(false);
+		expect(Utils.containsRef("from_parameter")).toEqual(false);
+		expect(Utils.containsRef(pg, false)).toEqual(true);
+		expect(Utils.containsRef(pg, true)).toEqual(true);
+		expect(Utils.containsRef(pg2, false)).toEqual(false);
+		expect(Utils.containsRef(pg2, true)).toEqual(true);
+	});
+
+});
+
+function sortRefs(arr) {
+	if (!Array.isArray(arr)) {
+		return arr;
+	}
+	return arr.map(e => {
+		let key = Object.keys(e)[0];
+		return key + ":" + e[key];
+	}).sort();
+}
+
+function compareRefs(arr1, arr2) {
+	expect(sortRefs(arr1)).toEqual(sortRefs(arr2));
+}
\ No newline at end of file
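As a closing illustration, the helpers covered by this new test file combine into a small pre-resolution scan (the argument value and ids are invented for illustration):

const Utils = require('../src/utils');

// Scan an argument value for references before resolving it. A shallow scan
// (deep = false) does not descend into callbacks, i.e. nested process graphs.
let value = {
    reducer: { process_graph: {} }, // callback; its inner refs are skipped when deep = false
    context: [{ from_node: "dc" }, { from_parameter: "threshold" }]
};
if (Utils.containsRef(value, false)) {
    for (let ref of Utils.getRefs(value, false)) {
        if (Utils.getType(ref) === 'result') {
            console.log('depends on node', ref.from_node);       // -> dc
        }
        else {
            console.log('reads parameter', ref.from_parameter); // -> threshold
        }
    }
}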