From 9bfd11438e539f5fab676be4640873e0a91d4f1a Mon Sep 17 00:00:00 2001 From: robyngit Date: Wed, 13 Nov 2024 16:29:45 -0500 Subject: [PATCH] Fix JS docs for DataPackage Issue #2567 --- src/js/collections/DataPackage.js | 948 ++++++++++++++++++------------ 1 file changed, 558 insertions(+), 390 deletions(-) diff --git a/src/js/collections/DataPackage.js b/src/js/collections/DataPackage.js index 269f2d0ad..a51ffa671 100644 --- a/src/js/collections/DataPackage.js +++ b/src/js/collections/DataPackage.js @@ -27,9 +27,8 @@ define([ ) => { /** * @class DataPackage - * @classdesc A DataPackage represents a hierarchical collection of - packages, metadata, and data objects, modeling an OAI-ORE RDF graph. - TODO: incorporate Backbone.UniqueModel + * @classdesc A DataPackage represents a hierarchical collection of packages, + * metadata, and data objects, modeling an OAI-ORE RDF graph. * @classcategory Collections * @name DataPackage * @augments Backbone.Collection @@ -50,17 +49,17 @@ define([ id: null, /** - * The type of the object (DataPackage, Metadata, Data) - * Simple queue to enqueue file transfers. Use push() and shift() - * to add and remove items. If this gets to large/slow, possibly - * switch to http://code.stephenmorley.org/javascript/queues/ + * The type of the object (DataPackage, Metadata, Data) Simple queue to + * enqueue file transfers. Use push() and shift() to add and remove items. + * If this gets to large/slow, possibly switch to + * http://code.stephenmorley.org/javascript/queues/ * @type {DataPackage|Metadata|Data[]} */ transferQueue: [], /** - * A flag ued for the package's edit status. Can be - * set to false to 'lock' the package + * A flag used for the package's edit status. Can be set to false to + * 'lock' the package * @type {boolean} */ editable: true, @@ -78,29 +77,29 @@ define([ packageModel: null, /** - * The science data identifiers associated with this - * data package (from cito:documents), mapped to the science metadata - * identifier that documents it - * Not to be changed after initial fetch - this is to keep track of the relationships in their original state + * The science data identifiers associated with this data package (from + * cito:documents), mapped to the science metadata identifier that + * documents it. Not to be changed after initial fetch - this is to keep + * track of the relationships in their original state * @type {object} */ originalIsDocBy: {}, /** * An array of ids that are aggregated in the resource map on the server. - * Taken from the original RDF XML that was fetched from the server. - * Used for comparing the original aggregation with the aggregation of this collection. + * Taken from the original RDF XML that was fetched from the server. Used + * for comparing the original aggregation with the aggregation of this + * collection. * @type {string[]} */ originalMembers: [], /** - * Keep the collection sorted by model "sortOrder". The three model types are ordered as: - * Metadata: 1 - * Data: 2 - * DataPackage: 3 - * See getMember(). We do this so that Metadata get rendered first, and Data are - * rendered as DOM siblings of the Metadata rows of the DataPackage table. + * Used to keep the collection sorted by model "sortOrder". The three + * model types are ordered as: Metadata: 1; Data: 2; DataPackage: 3. See + * getMember(). We do this so that Metadata get rendered first, and Data + * are rendered as DOM siblings of the Metadata rows of the DataPackage + * table. 
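+   * @example
+   * // Illustrative sketch only (not part of the original docs); the member
+   * // models and their "sortOrder" values below are hypothetical.
+   * // Given one Metadata member (sortOrder 1), two Data members (2), and
+   * // one nested DataPackage (3), sorting puts Metadata first:
+   * // dataPackage.sort();
+   * // dataPackage.pluck("sortOrder"); // => [1, 2, 2, 3]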
* @type {string} */ comparator: "sortOrder", @@ -112,22 +111,23 @@ define([ nodeLevel: 0, /** - * The SolrResults collection associated with this DataPackage. - * This can be used to fetch the package from Solr by passing the 'fromIndex' option - * to fetch(). + * The SolrResults collection associated with this DataPackage. This can + * be used to fetch the package from Solr by passing the 'fromIndex' + * option to fetch(). * @type {SolrResults} */ solrResults: new SolrResults(), /** - * A Filter model that should filter the Solr index for only the - * objects aggregated by this package. + * A Filter model that should filter the Solr index for only the objects + * aggregated by this package. * @type {Filter} */ filterModel: null, /** - * Define the namespaces used in the RDF XML + * Namespaces used in the RDF XML. The key is the prefix and the value is + * the namespace URI. * @type {object} */ namespaces: { @@ -143,22 +143,36 @@ define([ PROVONE: "http://purl.dataone.org/provone/2015/01/15/ontology#", }, + /** + * Package members that are sources in provenance relationships. + * @type {DataONEObject[]} + */ sources: [], + + /** + * Package members that are derivations in provenance relationships. + * @type {DataONEObject[]} + */ derivations: [], + + /** + * Set to "complete" to signal that all prov queries have finished + * @type {string|null} + */ provenanceFlag: null, - sourcePackages: [], - derivationPackages: [], - relatedModels: [], /** - * Contains provenance relationships added or deleted to this DataONEObject. - * Each entry is [operation ('add' or 'delete'), prov field name, object id], i.e. ['add', 'prov_used', 'urn:uuid:5678'] + * Contains provenance relationships added or deleted to this + * DataONEObject. Each entry is [operation ('add' or 'delete'), prov field + * name, object id], i.e. ['add', 'prov_used', 'urn:uuid:5678'] + * @type {string[][]} */ provEdits: [], /** * The number of models that have been updated during the current save(). * This is reset to zero after the current save() is complete. + * @type {number} */ numSaves: 0, @@ -212,8 +226,14 @@ define([ return this; }, - // Build the DataPackage URL based on the MetacatUI.appModel.objectServiceUrl - // and id or seriesid + /** + * Build the DataPackage URL based on the + * MetacatUI.appModel.objectServiceUrl and id or seriesid + * @param {object} [options] - Optional options for this URL + * @param {boolean} [options.update] - If true, this URL will be for + * updating the package + * @returns {string} The URL for this DataPackage + */ url(options) { if (options && options.update) { return ( @@ -235,10 +255,13 @@ define([ return MetacatUI.appModel.get("resolveServiceUrl") + encodedId; }, - /* - * The DataPackage collection stores DataPackages and - * DataONEObjects, including Metadata and Data objects. - * Return the correct model based on the type + /** + * The DataPackage collection stores DataPackages and DataONEObjects, + * including Metadata and Data objects. 
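+   * (For example — the formatIds here are assumptions for illustration, not
+   * taken from this patch — an EML formatId would typically yield an EML211
+   * model, the ORE resource-map formatId a nested DataPackage, and other
+   * formats a plain DataONEObject.)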
Return the correct model based on + * the type + * @param {object} attrs - The attributes of the model + * @param {object} options - Options to pass to the instantiated model + * @returns {DataONEObject|ScienceMetadata|EML211|DataPackage} The model */ // eslint-disable-next-line object-shorthand, func-names model: function (attrs, options) { @@ -411,13 +434,16 @@ define([ }, /** - * Overload fetch calls for a DataPackage - * @param {object} [options] - Optional options for this fetch that get sent with the XHR request - * @property {boolean} fetchModels - If false, this fetch will not fetch - * each model in the collection. It will only get the resource map object. - * @property {boolean} fromIndex - If true, the collection will be fetched from Solr rather than - * fetching the system metadata of each model. Useful when you only need to retrieve limited information about - * each package member. Set query-specific parameters on the `solrResults` SolrResults set on this collection. + * Overload fetch calls for a DataPackage + * @param {object} [options] - Optional options for this fetch that get + * sent with the XHR request + * @property {boolean} fetchModels - If false, this fetch will not fetch + * each model in the collection. It will only get the resource map object. + * @property {boolean} fromIndex - If true, the collection will be fetched + * from Solr rather than fetching the system metadata of each model. + * Useful when you only need to retrieve limited information about each + * package member. Set query-specific parameters on the `solrResults` + * SolrResults set on this collection. * @returns {jqXHR} The jQuery XMLHttpRequest for the request */ fetch(options = {}) { @@ -428,9 +454,11 @@ define([ // If the fetchModels property is set to false, if (fetchOptions.fetchModels === false) { - // Save the property to the Collection itself so it is accessible in other functions + // Save the property to the Collection itself so it is accessible in + // other functions this.fetchModels = false; - // Remove the property from the options Object since we don't want to send it with the XHR + // Remove the property from the options Object since we don't want to + // send it with the XHR delete fetchOptions.fetchModels; this.once("reset", this.triggerComplete); } @@ -442,9 +470,9 @@ define([ const thisPackage = this; - // Function to retry fetching with user login details if the initial fetch fails - // eslint-disable-next-line func-names - const retryFetch = function () { + // Function to retry fetching with user login details if the initial + // fetch fails eslint-disable-next-line func-names + const retryFetch = () => { // Add the authorization options const authFetchOptions = _.extend( fetchOptions, @@ -473,7 +501,8 @@ define([ * Deserialize a Package from OAI-ORE RDF XML * @param {string} response - The RDF/XML string to parse * @param {object} _options - Options for parsing the RDF/XML - * @returns {DataPackage[]} - An array of models that were parsed from the RDF/XML + * @returns {DataPackage[]} - An array of models that were parsed from the + * RDF/XML */ parse(response, _options) { // Save the raw XML in case it needs to be used later @@ -484,13 +513,13 @@ define([ const ORE = this.rdf.Namespace(this.namespaces.ORE); const CITO = this.rdf.Namespace(this.namespaces.CITO); const PROV = this.rdf.Namespace(this.namespaces.PROV); - // The following are not used: - // const XSD = this.rdf.Namespace(this.namespaces.XSD); - // const RDF = this.rdf.Namespace(this.namespaces.RDF); - // const 
FOAF = this.rdf.Namespace(this.namespaces.FOAF); - // const OWL = this.rdf.Namespace(this.namespaces.OWL); - // const DC = this.rdf.Namespace(this.namespaces.DC); - // const DCTERMS = this.rdf.Namespace(this.namespaces.DCTERMS); + // The following are not used: const XSD = + // this.rdf.Namespace(this.namespaces.XSD); const RDF = + // this.rdf.Namespace(this.namespaces.RDF); const FOAF = + // this.rdf.Namespace(this.namespaces.FOAF); const OWL = + // this.rdf.Namespace(this.namespaces.OWL); const DC = + // this.rdf.Namespace(this.namespaces.DC); const DCTERMS = + // this.rdf.Namespace(this.namespaces.DCTERMS); let memberStatements = []; let atLocationStatements = []; // array to store atLocation statements @@ -509,7 +538,8 @@ define([ const models = []; // the models returned by parse() try { - // First, make sure we are only using one CN Base URL in the RDF or the RDF parsing will fail. + // First, make sure we are only using one CN Base URL in the RDF or + // the RDF parsing will fail. const cnResolveUrl = MetacatUI.appModel.get("resolveServiceUrl"); const cnURLs = _.uniq( @@ -547,8 +577,9 @@ define([ if (memberPID) memberPIDs.push(memberPID); - // TODO: Test passing merge:true when adding a model and this if statement may not be necessary - // Create a DataONEObject model to represent this collection member and add to the collection + // TODO: Test passing merge:true when adding a model and this if + // statement may not be necessary Create a DataONEObject model to + // represent this collection member and add to the collection if (!_.contains(this.pluck("id"), memberPID)) { memberModel = new DataONEObject({ id: memberPID, @@ -558,7 +589,8 @@ define([ models.push(memberModel); } - // If the model already exists, add this resource map ID to it's list of resource maps + // If the model already exists, add this resource map ID to it's + // list of resource maps else { memberModel = this.get(memberPID); models.push(memberModel); @@ -616,8 +648,8 @@ define([ scimetaID, ]); - // Find the model in this collection for this data object - // var dataObj = this.get(scidataID); + // Find the model in this collection for this data object var + // dataObj = this.get(scidataID); const dataObj = _.find(models, (m) => m.get("id") === scidataID); if (dataObj) { @@ -676,10 +708,11 @@ define([ memberPIDs.unshift(id); }); - // Don't fetch each member model if the fetchModels property on this Collection is set to false + // Don't fetch each member model if the fetchModels property on this + // Collection is set to false if (this.fetchModels !== false) { - // Add the models to the collection now, silently - // this.add(models, {silent: true}); + // Add the models to the collection now, silently this.add(models, + // {silent: true}); // Retrieve the model for each member const collection = this; @@ -689,11 +722,15 @@ define([ // Get the right model type based on the model values const newModel = collection.getMember(oldModel); - // If the model type has changed, then mark the model as unsynced, since there may be custom fetch() options for the new model + // If the model type has changed, then mark the model as + // unsynced, since there may be custom fetch() options for the + // new model if (oldModel.type !== newModel.type) { - // DataPackages shouldn't be fetched until we support nested packages better in the UI + // DataPackages shouldn't be fetched until we support nested + // packages better in the UI if (newModel.type === "DataPackage") { - // Trigger a replace event so other parts of the app know when 
a model has been replaced with a different type + // Trigger a replace event so other parts of the app know + // when a model has been replaced with a different type oldModel.trigger("replace", newModel); } else { newModel.set("synced", false); @@ -706,7 +743,8 @@ define([ collection.remove(oldModel); collection.add(fetchedModel); - // Trigger a replace event so other parts of the app know when a model has been replaced with a different type + // Trigger a replace event so other parts of the app know + // when a model has been replaced with a different type oldModel.trigger("replace", newModel); if (newModel.type === "EML") @@ -728,21 +766,24 @@ define([ // TODO: Handle the error } - // trigger complete if fetchModel is false and this is the only object in the package + // trigger complete if fetchModel is false and this is the only object + // in the package if (this.fetchModels === false && models.length === 1) this.triggerComplete(); return models; }, - /* Parse the provenance relationships from the RDF graph, after all DataPackage members - have been fetched, as the prov info will be stored in them. - */ + /** + * Parse the provenance relationships from the RDF graph, after all + * DataPackage members have been fetched, as the prov info will be stored + * in them. + */ parseProv() { try { - /* Now run the SPARQL queries for the provenance relationships */ + // Now run the SPARQL queries for the provenance relationships const provQueries = []; - /* result: pidValue, wasDerivedFromValue (prov_wasDerivedFrom) */ + // result: pidValue, wasDerivedFromValue (prov_wasDerivedFrom) provQueries.prov_wasDerivedFrom = " \n" + @@ -760,7 +801,7 @@ define([ "} \n" + "]]>"; - /* result: pidValue, generatedValue (prov_generated) */ + // result: pidValue, generatedValue (prov_generated) provQueries.prov_generated = " \n" + @@ -780,7 +821,7 @@ define([ "} \n" + "]]>"; - /* result: pidValue, wasInformedByValue (prov_wasInformedBy) */ + // result: pidValue, wasInformedByValue (prov_wasInformedBy) provQueries.prov_wasInformedBy = " \n" + @@ -798,7 +839,7 @@ define([ "} \n" + "]]> \n"; - /* result: pidValue, usedValue (prov_used) */ + // result: pidValue, usedValue (prov_used) provQueries.prov_used = " \n" + @@ -818,7 +859,7 @@ define([ "} \n" + "]]> \n"; - /* result: pidValue, programPidValue (prov_generatesByProgram) */ + // result: pidValue, programPidValue (prov_generatesByProgram) provQueries.prov_generatedByProgram = " \n" + @@ -838,7 +879,7 @@ define([ "} \n" + "]]> \n"; - /* result: pidValue, executionPidValue */ + // result: pidValue, executionPidValue provQueries.prov_generatedByExecution = " \n" + @@ -856,7 +897,7 @@ define([ "} \n" + "]]> \n"; - /* result: pidValue, pid (prov_generatedByProgram) */ + // result: pidValue, pid (prov_generatedByProgram) provQueries.prov_generatedByUser = " \n" + @@ -875,7 +916,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, programPidValue (prov_usedByProgram) */ + // results: pidValue, programPidValue (prov_usedByProgram) provQueries.prov_usedByProgram = " \n" + @@ -895,7 +936,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, executionIdValue (prov_usedByExecution) */ + // results: pidValue, executionIdValue (prov_usedByExecution) provQueries.prov_usedByExecution = " \n" + @@ -913,7 +954,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, pid (prov_usedByUser) */ + // results: pidValue, pid (prov_usedByUser) provQueries.prov_usedByUser = " \n" + @@ -931,7 +972,7 @@ define([ "?primary_data dcterms:identifier ?pid . 
\n" + "} \n" + "]]> \n"; - /* results: pidValue, executionIdValue (prov_wasExecutedByExecution) */ + // results: pidValue, executionIdValue (prov_wasExecutedByExecution) provQueries.prov_wasExecutedByExecution = " \n" + @@ -950,7 +991,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, pid (prov_wasExecutedByUser) */ + // results: pidValue, pid (prov_wasExecutedByUser) provQueries.prov_wasExecutedByUser = " \n" + @@ -969,7 +1010,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, derivedDataPidValue (prov_hasDerivations) */ + // results: pidValue, derivedDataPidValue (prov_hasDerivations) provQueries.prov_hasDerivations = " \n" + @@ -988,7 +1029,7 @@ define([ "} \n" + "]]> \n"; - /* results: pidValue, pid (prov_instanceOfClass) */ + // results: pidValue, pid (prov_instanceOfClass) provQueries.prov_instanceOfClass = " \n" + @@ -1005,9 +1046,10 @@ define([ "} \n" + "]]> \n"; - // These are the provenance fields that are currently searched for in the provenance queries, but - // not all of these fields are displayed by any view. - // Note: this list is different than the prov list returned by MetacatUI.appSearchModel.getProvFields() + // These are the provenance fields that are currently searched for in + // the provenance queries, but not all of these fields are displayed + // by any view. Note: this list is different than the prov list + // returned by MetacatUI.appSearchModel.getProvFields() this.provFields = [ "prov_wasDerivedFrom", "prov_generated", @@ -1029,15 +1071,15 @@ define([ const keys = Object.keys(provQueries); this.queriesToRun = keys.length; - // Bind the onResult and onDone functions to the model so they can be called out of context + // Bind the onResult and onDone functions to the model so they can be + // called out of context this.onResult = _.bind(this.onResult, this); this.onDone = _.bind(this.onDone, this); - /* Run queries for all provenance fields. - Each query may have multiple solutions and each solution will trigger a callback - to the 'onResult' function. When each query has completed, the 'onDone' function - is called for that query. - */ + // Run queries for all provenance fields. Each query may have multiple + // solutions and each solution will trigger a callback to the + // 'onResult' function. When each query has completed, the 'onDone' + // function is called for that query. for (let iquery = 0; iquery < keys.length; iquery += 1) { const eq = rdf.SPARQLToQuery( provQueries[keys[iquery]], @@ -1056,7 +1098,12 @@ define([ } }, - // The return values have to be extracted from the result. + /** + * The return values have to be extracted from the result. + * @param {object} result - The result of the SPARQL query + * @param {string} name - The name of the field to extract + * @returns {string} - The value of the result + */ getValue(result, name) { const res = result[name]; // The result is of type 'NamedNode', just return the string value @@ -1066,26 +1113,29 @@ define([ return " "; }, - /* This callback is called for every query solution of the SPARQL queries. One - query may result in multple queries solutions and calls to this function. - Each query result returns two pids, i.e. pid: 1234 prov_generated: 5678, - which corresponds to the RDF triple '5678 wasGeneratedBy 1234', or the - DataONE solr document for pid '1234', with the field prov_generated: 5678. 
- - The result can look like this: - [?pid: t, ?prov_wasDerivedFrom: t, ?primary_data: t, ?derived_data: t] - ?derived_data : t {termType: "NamedNode", value: "https://cn-stage.test.dataone.org/cn/v2/resolve/urn%3Auuid%3Adbbb9a2e-af64-452a-b7b9-122861a5dbb2"} - ?pid : t {termType: "Literal", value: "urn:uuid:dbbb9a2e-af64-452a-b7b9-122861a5dbb2", datatype: t} - ?primary_data : t {termType: "NamedNode", value: "https://cn-stage.test.dataone.org/cn/v2/resolve/urn%3Auuid%3Aaae9d025-a331-4c3a-b399-a8ca0a2826ef"} - ?prov_wasDerivedFrom : t {termType: "Literal", value: "urn:uuid:aae9d025-a331-4c3a-b399-a8ca0a2826ef", datatype: t}] - */ + /** + * This callback is called for every query solution of the SPARQL queries. + * One query may result in multple queries solutions and calls to this + * function. Each query result returns two pids, i.e. pid: 1234 + * prov_generated: 5678, which corresponds to the RDF triple '5678 + * wasGeneratedBy 1234', or the DataONE solr document for pid '1234', with + * the field prov_generated: 5678. + * @param {object} result - The result of the SPARQL query + * @example + * // The result can look like this: + * [?pid: t, ?prov_wasDerivedFrom: t, ?primary_data: t, ?derived_data: t] + * ?derived_data : t {termType: "NamedNode", value: "https://cn-stage.test.dataone.org/cn/v2/resolve/urn%3Auuid%3Adbbb9a2e-af64-452a-b7b9-122861a5dbb2"} + * ?pid : t {termType: "Literal", value: "urn:uuid:dbbb9a2e-af64-452a-b7b9-122861a5dbb2", datatype: t} + * ?primary_data : t {termType: "NamedNode", value: "https://cn-stage.test.dataone.org/cn/v2/resolve/urn%3Auuid%3Aaae9d025-a331-4c3a-b399-a8ca0a2826ef"} + * ?prov_wasDerivedFrom : t {termType: "Literal", value: "urn:uuid:aae9d025-a331-4c3a-b399-a8ca0a2826ef", datatype: t}] + */ onResult(result) { const currentPid = this.getValue(result, "?pid"); let resval; - // If there is a solution for this query, assign the value - // to the prov field attribute (e.g. "prov_generated") of the package member (a DataONEObject) - // with id = '?pid' + // If there is a solution for this query, assign the value to the prov + // field attribute (e.g. "prov_generated") of the package member (a + // DataONEObject) with id = '?pid' if (typeof currentPid !== "undefined" && currentPid !== " ") { let currentMember = null; let fieldName = null; @@ -1096,24 +1146,26 @@ define([ if (typeof currentMember === "undefined") { return; } - // Search for a provenenace field value (i.e. 'prov_wasDerivedFrom') that was - // returned from the query. The current prov queries all return one prov field each - // (see this.provFields). - // Note: dataPackage.provSources and dataPackage.provDerivations are accumulators for - // the entire DataPackage. member.sources and member.derivations are accumulators for - // each package member, and are used by functions such as ProvChartView(). + // Search for a provenenace field value (i.e. 'prov_wasDerivedFrom') + // that was returned from the query. The current prov queries all + // return one prov field each (see this.provFields). Note: + // dataPackage.provSources and dataPackage.provDerivations are + // accumulators for the entire DataPackage. member.sources and + // member.derivations are accumulators for each package member, and + // are used by functions such as ProvChartView(). for (let iFld = 0; iFld < this.provFields.length; iFld += 1) { fieldName = this.provFields[iFld]; resval = `?${fieldName}`; - // The pid corresponding to the object of the RDF triple, with the predicate - // of 'prov_generated', 'prov_used', etc. 
- // getValue returns a string value. + // The pid corresponding to the object of the RDF triple, with the + // predicate of 'prov_generated', 'prov_used', etc. getValue returns + // a string value. const provFieldResult = this.getValue(result, resval); if (provFieldResult !== " ") { - // Find the Datapacakge member for the result 'pid' and add the result - // prov_* value to it. This is the package member that is the 'subject' of the - // prov relationship. - // The 'resultMember' could be in the current package, or could be in another 'related' package. + // Find the Datapacakge member for the result 'pid' and add the + // result prov_* value to it. This is the package member that is + // the 'subject' of the prov relationship. The 'resultMember' + // could be in the current package, or could be in another + // 'related' package. resultMember = this.find( (model) => model.get("id") === provFieldResult, ); @@ -1131,7 +1183,8 @@ define([ if (!packageMember) { this.sources.push(resultMember); } - // Only add the result member if it has not already been added. + // Only add the result member if it has not already been + // added. if (!matchingMember) { vals = currentMember.get("provSources"); vals.push(resultMember); @@ -1144,7 +1197,8 @@ define([ const matchingDerivation = currentMember .get("provDerivations") .find((source) => source.id === provFieldResult); - // If this prov field is a 'derivation' field, add it to 'derivations' + // If this prov field is a 'derivation' field, add it to + // 'derivations' if (!derivation) { this.derivations.push(resultMember); } @@ -1155,23 +1209,27 @@ define([ } } - // Get the existing values for this prov field in the package member + // Get the existing values for this prov field in the package + // member vals = currentMember.get(fieldName); - // Push this result onto the prov file list if it is not there, i.e. + // Push this result onto the prov file list if it is not there, + // i.e. if (!_.contains(vals, resultMember)) { vals.push(resultMember); currentMember.set(fieldName, vals); } - // provFieldValues = _.uniq(provFieldValues); - // Add the current prov valid (a pid) to the current value in the member + // provFieldValues = _.uniq(provFieldValues); Add the current + // prov valid (a pid) to the current value in the member // currentMember.set(fieldName, provFieldValues); // this.add(currentMember, { merge: true }); } else { - // The query result field is not the identifier of a packge member, so it may be the identifier - // of another 'related' package, or it may be a string value that is the object of a prov relationship, - // i.e. for 'prov_instanceOfClass' == 'http://purl.dataone.org/provone/2015/01/15/ontology#Data', + // The query result field is not the identifier of a packge + // member, so it may be the identifier of another 'related' + // package, or it may be a string value that is the object of a + // prov relationship, i.e. for 'prov_instanceOfClass' == + // 'http://purl.dataone.org/provone/2015/01/15/ontology#Data', // so add the value to the current member. vals = currentMember.get(fieldName); if (!_.contains(vals, provFieldResult)) { @@ -1184,7 +1242,7 @@ define([ } }, - /* This callback is called when all queries have finished. */ + /** This callback is called when all queries have finished. 
*/ onDone() { if (this.queriesToRun > 1) { this.queriesToRun -= 1; @@ -1195,8 +1253,10 @@ define([ } }, - /* + /** * Use the DataONEObject parseSysMeta() function + * @param {object} sysMeta - The system metadata object to parse + * @returns {object} The parsed system metadata object */ parseSysMeta(sysMeta) { return DataONEObject.parseSysMeta.call(this, sysMeta); @@ -1205,8 +1265,11 @@ define([ /** * Overwrite the Backbone.Collection.sync() function to set custom options * @param {object} [options] - Options for this DataPackage save - * @param {boolean} [options.sysMetaOnly] - If true, only the system metadata of this Package will be saved. - * @param {boolean} [options.resourceMapOnly] - If true, only the Resource Map/Package object will be saved. Metadata and Data objects aggregated by the package will be skipped. + * @param {boolean} [options.sysMetaOnly] - If true, only the system + * metadata of this Package will be saved. + * @param {boolean} [options.resourceMapOnly] - If true, only the Resource + * Map/Package object will be saved. Metadata and Data objects aggregated + * by the package will be skipped. */ save(options = {}) { this.packageModel.set("uploadStatus", "p"); @@ -1224,8 +1287,8 @@ define([ return; } - // If we want to update the system metadata only, - // then update via the DataONEObject model and exit + // If we want to update the system metadata only, then update via the + // DataONEObject model and exit if (options.sysMetaOnly) { this.packageModel.save(null, options); return; @@ -1245,8 +1308,9 @@ define([ const modelsToBeSaved = _.filter( sortedModels, (m) => - // Models should be saved if they are in the save queue, had an error saving earlier, - // or they are Science Metadata model that is NOT already in progress + // Models should be saved if they are in the save queue, had an + // error saving earlier, or they are Science Metadata model that + // is NOT already in progress (m.get("type") === "Metadata" && m.get("uploadStatus") === "q") || (m.get("type") === "Data" && m.get("hasContentChanges") && @@ -1259,12 +1323,13 @@ define([ m.get("uploadStatus") !== "e" && m.get("uploadStatus") !== null), ); - // Get an array of data objects whose system metadata should be updated. + // Get an array of data objects whose system metadata should be + // updated. sysMetaToUpdate = _.reject( dataModels, (m) => - // Find models that don't have any content changes to save, - // and whose system metadata is not already saving + // Find models that don't have any content changes to save, and + // whose system metadata is not already saving !m.hasUpdates() || m.get("hasContentChanges") || m.get("sysMetaUploadStatus") === "p" || @@ -1281,8 +1346,8 @@ define([ return false; }); - // If at least once model to be saved is invalid, - // or the metadata failed to save, cancel the save. + // If at least once model to be saved is invalid, or the metadata + // failed to save, cancel the save. 
if ( !allValid || _.contains( @@ -1296,7 +1361,8 @@ define([ return; } - // If we are saving at least one model in this package, then serialize the Resource Map RDF XML + // If we are saving at least one model in this package, then serialize + // the Resource Map RDF XML if (modelsToBeSaved.length) { try { // Set a new id and keep our old id @@ -1324,7 +1390,8 @@ define([ // First save all the models of the collection, if needed modelsToBeSaved.forEach((model) => { - // If the model is saved successfully, start this save function again + // If the model is saved successfully, start this save function + // again this.stopListening(model, "successSaving", this.save); this.listenToOnce(model, "successSaving", this.save); @@ -1356,10 +1423,12 @@ define([ this.numSaves += 1; }); - // If there are still models in progress of uploading, then exit. (We will return when they are synced to upload the resource map) + // If there are still models in progress of uploading, then exit. (We + // will return when they are synced to upload the resource map) if (modelsInProgress.length) return; } - // If we are saving the resource map object only, and there are changes to save, serialize the RDF XML + // If we are saving the resource map object only, and there are changes + // to save, serialize the RDF XML else if (this.needsUpdate()) { try { // Set a new id and keep our old id @@ -1384,12 +1453,14 @@ define([ return; } } - // If we are saving the resource map object only, and there are no changes to save, exit the function + // If we are saving the resource map object only, and there are no + // changes to save, exit the function else if (!this.needsUpdate()) { return; } - // If no models were saved and this package has no changes, we can exit without saving the resource map + // If no models were saved and this package has no changes, we can exit + // without saving the resource map if (this.numSaves < 1 && !this.needsUpdate()) { this.numSaves = 0; this.packageModel.set( @@ -1400,7 +1471,8 @@ define([ return; } - // Reset the number of models saved since they should all be completed by now + // Reset the number of models saved since they should all be completed + // by now this.numSaves = 0; // Determine the HTTP request type @@ -1423,8 +1495,10 @@ define([ formData.append("pid", this.packageModel.get("oldPid")); } - // Do a fresh re-serialization of the RDF XML, in case any pids in the package have changed. - // The hope is that any errors during the serialization process have already been caught during the first serialization above + // Do a fresh re-serialization of the RDF XML, in case any pids in the + // package have changed. 
The hope is that any errors during the + // serialization process have already been caught during the first + // serialization above try { mapXML = this.serialize(); } catch (serializationException) { @@ -1465,8 +1539,9 @@ define([ type: "application/xml", }); - // Add the object XML and System Metadata XML to the form data - // Append the system metadata first, so we can take advantage of Metacat's streaming multipart handler + // Add the object XML and System Metadata XML to the form data Append + // the system metadata first, so we can take advantage of Metacat's + // streaming multipart handler formData.append("sysmeta", xmlBlob, "sysmeta"); formData.append("object", mapBlob); @@ -1492,8 +1567,8 @@ define([ m.set("uploadStatus", m.defaults().uploadStatus); }); - // Reset oldPid to null so we know we need to update the ID - // in the future + // Reset oldPid to null so we know we need to update the ID in the + // future collection.packageModel.set("oldPid", null); // Reset the upload status for the package @@ -1525,7 +1600,8 @@ define([ m.set("uploadStatus", m.defaults().uploadStatus); }); - // When there is no network connection (status === 0), there will be no response text + // When there is no network connection (status === 0), there will be + // no response text let parsedResponse = "There was a network issue that prevented this file from uploading. " + "Make sure you are connected to a reliable internet connection."; @@ -1558,11 +1634,13 @@ define([ ); }, - /* + /** * When a data package member updates, we evaluate it for its formatid, * and update it appropriately if it is not a data object only + * @param {Backbone.Model} context - The model that was updated + * @returns {Backbone.Model} The updated model */ - getMember(context, _args) { + getMember(context) { let memberModel = {}; switch (context.get("formatId")) { @@ -1868,10 +1946,16 @@ define([ return memberModel; }, + /** + * Trigger the complete event if all models have been fetched + * @param {Backbone.Model} model - The model that was fetched + */ triggerComplete(model) { - // If the last fetch did not fetch the models of the collection, then mark as complete now. + // If the last fetch did not fetch the models of the collection, then + // mark as complete now. if (this.fetchModels === false) { - // Delete the fetchModels property since it is set only once per fetch. + // Delete the fetchModels property since it is set only once per + // fetch. delete this.fetchModels; this.trigger("complete", this); @@ -1884,28 +1968,37 @@ define([ (m) => m.get("synced") || m.get("id") === model.get("id"), ); - // If there are any models that are not synced yet, the collection is not complete + // If there are any models that are not synced yet, the collection is + // not complete if (notSynced.length > 0) { return; } - // If the number of models in this collection does not equal the number of objects referenced in the RDF XML, the collection is not complete + // If the number of models in this collection does not equal the number + // of objects referenced in the RDF XML, the collection is not complete if (this.originalMembers.length > this.length) return; this.sort(); this.trigger("complete", this); }, - /* Accumulate edits that are made to the provenance relationships via the ProvChartView. these - edits are accumulated here so that they are available to any package member or view. - */ + /** + * Accumulate edits that are made to the provenance relationships via the + * ProvChartView. 
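+   * (A hypothetical call, for illustration only — the identifiers and the
+   * "prov_used" predicate are assumed values:
+   * recordProvEdit("add", "urn:uuid:AAA", "prov_used", "urn:uuid:BBB")
+   * queues the entry ["add", "urn:uuid:AAA", "prov_used", "urn:uuid:BBB"].)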
these edits are accumulated here so that they are + * available to any package member or view. + * @param {string} operation - The operation performed on the relationship + * (add or delete) + * @param {string} subject - The subject of the relationship + * @param {string} predicate - The predicate of the relationship + * @param {string} object - The object of the relationship + */ recordProvEdit(operation, subject, predicate, object) { if (!this.provEdits.length) { this.provEdits = [[operation, subject, predicate, object]]; } else { // First check if the edit already exists in the list. If yes, then - // don't add it again! This could occur if an edit icon was clicked rapidly - // before it is dismissed. + // don't add it again! This could occur if an edit icon was clicked + // rapidly before it is dismissed. const editFound = _.find( this.provEdits, (edit) => @@ -1943,23 +2036,29 @@ define([ }); // If we cancelled out edit containing inverse of the current edit - // then the edit list will now be one edit shorter. Test for this - // and only save the current edit if we didn't remove the inverse. + // then the edit list will now be one edit shorter. Test for this and + // only save the current edit if we didn't remove the inverse. if (editListSize >= this.provEdits.length) { this.provEdits.push([operation, subject, predicate, object]); } } }, - // Return true if the prov edits list is not empty + /** + * Check if there are any provenance edits pending + * @returns {boolean} Returns true if the prov edits list is not empty, + * otherwise false. + */ provEditsPending() { if (this.provEdits.length) return true; return false; }, - /* If provenance relationships have been modified by the provenance editor (in ProvChartView), then - update the ORE Resource Map and save it to the server. - */ + /** + * If provenance relationships have been modified by the provenance editor + * (in ProvChartView), then update the ORE Resource Map and save it to the + * server. + */ saveProv() { const graph = this.dataPackageGraph; const rdfRef = this.rdf; @@ -1971,28 +2070,30 @@ define([ const RDF = rdfRef.Namespace(this.namespaces.RDF); const PROV = rdfRef.Namespace(this.namespaces.PROV); const PROVONE = rdfRef.Namespace(this.namespaces.PROVONE); - // The following are not used: - // const DCTERMS = rdfRef.Namespace(this.namespaces.DCTERMS); - // const CITO = rdfRef.Namespace(this.namespaces.CITO); - // const XSD = rdfRef.Namespace(this.namespaces.XSD); - - /* Check if this package member had provenance relationships added - or deleted by the provenance editor functionality of the ProvChartView - */ + // The following are not used: const DCTERMS = + // rdfRef.Namespace(this.namespaces.DCTERMS); const CITO = + // rdfRef.Namespace(this.namespaces.CITO); const XSD = + // rdfRef.Namespace(this.namespaces.XSD); + + // Check if this package member had provenance relationships added or + // deleted by the provenance editor functionality of the ProvChartView provEdits.forEach((edit) => { const [operation, subject, predicate, object] = edit; - // The predicates of the provenance edits recorded by the ProvChartView - // indicate which W3C PROV relationship has been recorded. - // First check if this relationship alread exists in the RDF graph. - // See DataPackage.parseProv for a description of how relationships from an ORE resource map - // are parsed and stored in DataONEObjects. 
Here we are reversing the process, so may need - // The representation of the PROVONE data model is simplified in the ProvChartView, to aid - // legibility for users not familiar with the details of the PROVONE model. In this simplification, - // a provone:Program has direct inputs and outputs. In the actual model, a prov:Execution has - // inputs and outputs and is connected to a program via a prov:association. We must 'expand' the - // simplified provenance updates recorded by the editor into the fully detailed representation - // of the actual model. + // The predicates of the provenance edits recorded by the + // ProvChartView indicate which W3C PROV relationship has been + // recorded. First check if this relationship alread exists in the RDF + // graph. See DataPackage.parseProv for a description of how + // relationships from an ORE resource map are parsed and stored in + // DataONEObjects. Here we are reversing the process, so may need The + // representation of the PROVONE data model is simplified in the + // ProvChartView, to aid legibility for users not familiar with the + // details of the PROVONE model. In this simplification, a + // provone:Program has direct inputs and outputs. In the actual model, + // a prov:Execution has inputs and outputs and is connected to a + // program via a prov:association. We must 'expand' the simplified + // provenance updates recorded by the editor into the fully detailed + // representation of the actual model. let executionId; let executionNode; let programId; @@ -2038,10 +2139,12 @@ define([ programId = object; dataNode = subjectNode; if (operation === "add") { - // 'subject' is the program id, which is a simplification of the PROVONE model for display. - // In the PROVONE model, execution 'uses' and input, and is associated with a program. + // 'subject' is the program id, which is a simplification of the + // PROVONE model for display. In the PROVONE model, execution + // 'uses' and input, and is associated with a program. executionId = this.addProgramToGraph(programId); - // executionNode = rdfRef.sym(cnResolveUrl + encodeURIComponent(executionId)); + // executionNode = rdfRef.sym(cnResolveUrl + + // encodeURIComponent(executionId)); executionNode = this.getExecutionNode(executionId); this.addToGraph(dataNode, RDF("type"), PROVONE("Data")); this.addToGraph( @@ -2070,10 +2173,12 @@ define([ programId = object; dataNode = subjectNode; if (operation === "add") { - // 'subject' is the program id, which is a simplification of the PROVONE model for display. - // In the PROVONE model, execution 'uses' and input, and is associated with a program. + // 'subject' is the program id, which is a simplification of the + // PROVONE model for display. In the PROVONE model, execution + // 'uses' and input, and is associated with a program. executionId = this.addProgramToGraph(programId); - // executionNode = rdfRef.sym(cnResolveUrl + encodeURIComponent(executionId)); + // executionNode = rdfRef.sym(cnResolveUrl + + // encodeURIComponent(executionId)); executionNode = this.getExecutionNode(executionId); this.addToGraph(dataNode, RDF("type"), PROVONE("Data")); this.addToGraph(executionNode, PROV("used"), dataNode); @@ -2134,15 +2239,21 @@ define([ } }); - // When saving provenance only, we only have to save the Resource Map/Package object. - // So we will send the resourceMapOnly flag with the save function. + // When saving provenance only, we only have to save the Resource + // Map/Package object. 
So we will send the resourceMapOnly flag with + // the save function. this.save({ resourceMapOnly: true, }); }, - /* Add the specified relationship to the RDF graph only if it - has not already been added. */ + /** + * Add the specified relationship to the RDF graph only if it has not + * already been added. + * @param {object} subject - The subject of the statement to add + * @param {object} predicate - The predicate of the statement to add + * @param {object} object - The object of the statement to add + */ addToGraph(subject, predicate, object) { const graph = this.dataPackageGraph; const statements = graph.statementsMatching(subject, predicate, object); @@ -2152,13 +2263,18 @@ define([ } }, - /* Remove the statement fromn the RDF graph only if the subject of this - relationship is not referenced by any other provenance relationship, i.e. - for example, the prov relationship "id rdf:type provone:data" is only - needed if the subject ('id') is referenced in another relationship. - Also don't remove it if the subject is in any other prov statement, - meaning it still references another prov object. - */ + /** + * Remove the statement fromn the RDF graph only if the subject of this + * relationship is not referenced by any other provenance relationship, + * i.e. for example, the prov relationship "id rdf:type provone:data" is + * only needed if the subject ('id') is referenced in another + * relationship. Also don't remove it if the subject is in any other prov + * statement, meaning it still references another prov object. + * @param {object} subjectNode - The subject of the statement to remove + * @param {object} predicateNode - The predicate of the statement to + * remove + * @param {object} objectNode - The object of the statement to remove + */ removeIfLastProvRef(subjectNode, predicateNode, objectNode) { const graph = this.dataPackageGraph; const PROV = rdf.Namespace(this.namespaces.PROV); @@ -2167,8 +2283,8 @@ define([ const provStr = PROV("").value; // PROVONE namespace value, used to identify PROVONE statements const provoneStr = PROVONE("").value; - // Get the statements from the RDF graph that reference the subject of the - // statement to remove. + // Get the statements from the RDF graph that reference the subject of + // the statement to remove. let statements = graph.statementsMatching( undefined, undefined, @@ -2185,10 +2301,9 @@ define([ const pVal = statement.predicate.value; - // Now check if the subject is referenced in a prov statement - // There is another statement that references the subject of the - // statement to remove, so it is still being used and don't - // remove it. + // Now check if the subject is referenced in a prov statement There is + // another statement that references the subject of the statement to + // remove, so it is still being used and don't remove it. if (pVal.indexOf(provStr) !== -1) return true; if (pVal.indexOf(provoneStr) !== -1) return true; return false; @@ -2218,8 +2333,8 @@ define([ if (pVal.indexOf(provStr) !== -1) return true; if (pVal.indexOf(provoneStr) !== -1) return true; // There is another statement that references the subject of the - // statement to remove, so it is still being used and don't - // remove it. + // statement to remove, so it is still being used and don't remove + // it. 
return false; }, this, @@ -2240,12 +2355,12 @@ define([ /** * Remove orphaned blank nodes from the model's current graph * - * This was put in to support replacing package members who are - * referenced by provenance statements, specifically members typed as - * Programs. rdflib.js will throw an error when serializing if any - * statements in the graph have objects that are blank nodes when no - * other statements in the graph have subjects for the same blank node. - * i.e., blank nodes references that aren't defined. + * This was put in to support replacing package members who are referenced + * by provenance statements, specifically members typed as Programs. + * rdflib.js will throw an error when serializing if any statements in the + * graph have objects that are blank nodes when no other statements in the + * graph have subjects for the same blank node. i.e., blank nodes + * references that aren't defined. * * Should be called during a call to serialize() and mutates * this.dataPackageGraph directly as a side-effect. @@ -2287,11 +2402,15 @@ define([ }); }, - /* Get the execution identifier that is associated with a program id. - This will either be in the 'prov_wasExecutedByExecution' of the package member - for the program script, or available by tracing backward in the RDF graph from - the program node, through the assocation to the related execution. - */ + /** + * Get the execution identifier that is associated with a program id. This + * will either be in the 'prov_wasExecutedByExecution' of the package + * member for the program script, or available by tracing backward in the + * RDF graph from the program node, through the assocation to the related + * execution. + * @param {string} programId - The program identifier + * @returns {string} The execution identifier + */ getExecutionId(programId) { const rdfRef = this.rdf; const graph = this.dataPackageGraph; @@ -2300,8 +2419,7 @@ define([ rdfRef.Namespace(this.namespaces.RDF); const PROV = rdfRef.Namespace(this.namespaces.PROV); - // Not used: - // const DCTERMS = rdfRef.Namespace(this.namespaces.DCTERMS); + // Not used: const DCTERMS = rdfRef.Namespace(this.namespaces.DCTERMS); // const PROVONE = rdfRef.Namespace(this.namespaces.PROVONE); const member = this.get(programId); @@ -2310,8 +2428,8 @@ define([ return executionId[0]; } const programNode = rdfRef.sym(this.getURIFromRDF(programId)); - // Get the executionId from the RDF graph - // There can be only one plan for an association + // Get the executionId from the RDF graph There can be only one plan for + // an association stmts = graph.statementsMatching( undefined, PROV("hadPlan"), @@ -2329,11 +2447,14 @@ define([ return stmts[0].subject; }, - /* Get the RDF node for an execution that is associated with the execution identifier. - The execution may have been created in the resource map as a 'bare' urn:uuid - (no resolveURI), or as a resolve URL, so check for both until the id is - found. - */ + /** + * Get the RDF node for an execution that is associated with the execution + * identifier. The execution may have been created in the resource map as + * a 'bare' urn:uuid (no resolveURI), or as a resolve URL, so check for + * both until the id is found. 
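+   * For example (identifiers and URL are hypothetical, for illustration
+   * only), the same execution may appear in the graph either as
+   * "urn:uuid:1234-abcd" or as
+   * "https://cn.dataone.org/cn/v2/resolve/urn%3Auuid%3A1234-abcd".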
+ * @param {string} executionId - The execution identifier + * @returns {object} The RDF node for the execution + */ getExecutionNode(executionId) { const rdfRef = this.rdf; const graph = this.dataPackageGraph; @@ -2342,15 +2463,16 @@ define([ this.getCnURI(); let executionNode = null; - // First see if the execution exists in the RDF graph as a 'bare' idenfier, i.e. - // a 'urn:uuid'. + // First see if the execution exists in the RDF graph as a 'bare' + // idenfier, i.e. a 'urn:uuid'. stmts = graph.statementsMatching( rdfRef.sym(executionId), undefined, undefined, ); if (typeof stmts === "undefined" || !stmts.length) { - // The execution node as urn was not found, look for fully qualified version. + // The execution node as urn was not found, look for fully qualified + // version. testNode = rdfRef.sym(this.getURIFromRDF(executionId)); stmts = graph.statementsMatching( rdfRef.sym(executionId), @@ -2369,6 +2491,11 @@ define([ return executionNode; }, + /** + * Add a program identifier to the RDF graph and create an execution node + * @param {string} programId - The program identifier + * @returns {string} The execution identifier + */ addProgramToGraph(programId) { const rdfRef = this.rdf; const graph = this.dataPackageGraph; @@ -2385,22 +2512,25 @@ define([ this.getCnURI(); if (!executionId.length) { - // This is a new execution, so create new execution and association ids + // This is a new execution, so create new execution and association + // ids executionId = `urn:uuid:${uuid.v4()}`; member.set("prov_wasExecutedByExecution", [executionId]); - // Blank node id. RDF validator doesn't like ':' so don't use in the id - // executionNode = rdfRef.sym(cnResolveUrl + encodeURIComponent(executionId)); + // Blank node id. RDF validator doesn't like ':' so don't use in the + // id executionNode = rdfRef.sym(cnResolveUrl + + // encodeURIComponent(executionId)); executionNode = this.getExecutionNode(executionId); // associationId = "_" + uuid.v4(); associationNode = graph.bnode(); } else { [executionId] = executionId; - // Check if an association exists in the RDF graph for this execution id - // executionNode = rdfRef.sym(cnResolveUrl + encodeURIComponent(executionId)); + // Check if an association exists in the RDF graph for this execution + // id executionNode = rdfRef.sym(cnResolveUrl + + // encodeURIComponent(executionId)); executionNode = this.getExecutionNode(executionId); - // Check if there is an association id for this execution. - // If this execution is newly created (via the editor (existing would - // be parsed from the resmap), then create a new association id. + // Check if there is an association id for this execution. If this + // execution is newly created (via the editor (existing would be + // parsed from the resmap), then create a new association id. const stmts = graph.statementsMatching( executionNode, PROV("qualifiedAssociation"), @@ -2416,8 +2546,8 @@ define([ associationNode = graph.bnode(); } } - // associationNode = graph.bnode(associationId); - // associationNode = graph.bnode(); + // associationNode = graph.bnode(associationId); associationNode = + // graph.bnode(); programNode = rdfRef.sym(this.getURIFromRDF(programId)); try { this.addToGraph( @@ -2439,9 +2569,14 @@ define([ return executionId; }, - // Remove a program identifier from the RDF graph and remove associated - // linkage between the program id and the exection, if the execution is not - // being used by any other statements. 
+ /** + * Remove a program identifier from the RDF graph and remove associated + * linkage between the program id and the exection, if the execution is + * not being used by any other statements. + * @param {string} programId - The program identifier + * @returns {boolean} Returns true if the program was removed, otherwise + * false. + */ removeProgramFromGraph(programId) { const graph = this.dataPackageGraph; const rdfRef = this.rdf; @@ -2457,18 +2592,21 @@ define([ const executionId = this.getExecutionId(programId); if (executionId !== null && executionId !== undefined) return false; - // var executionNode = rdfRef.sym(cnResolveUrl + encodeURIComponent(executionId)); + // var executionNode = rdfRef.sym(cnResolveUrl + + // encodeURIComponent(executionId)); const executionNode = this.getExecutionNode(executionId); const programNode = rdfRef.sym(this.getURIFromRDF(programId)); - // In order to remove this program from the graph, we have to first determine that - // nothing else is using the execution that is associated with the program (the plan). - // There may be additional 'used', 'geneated', 'qualifiedGeneration', etc. items that - // may be pointing to the execution. If yes, then don't delete the execution or the - // program (the execution's plan). + // In order to remove this program from the graph, we have to first + // determine that nothing else is using the execution that is associated + // with the program (the plan). There may be additional 'used', + // 'geneated', 'qualifiedGeneration', etc. items that may be pointing to + // the execution. If yes, then don't delete the execution or the program + // (the execution's plan). try { - // Is the program in the graph? If the program is not in the graph, then - // we don't know how to remove the proper execution and assocation. + // Is the program in the graph? If the program is not in the graph, + // then we don't know how to remove the proper execution and + // assocation. stmts = graph.statementsMatching(undefined, undefined, programNode); if (typeof stmts === "undefined" || !stmts.length) return false; @@ -2538,8 +2676,9 @@ define([ return true; }, - /* + /** * Serialize the DataPackage to OAI-ORE RDF XML + * @returns {string} The serialized RDF/XML */ serialize() { // Create an RDF serializer @@ -2561,17 +2700,19 @@ define([ const RDF = this.rdf.Namespace(this.namespaces.RDF); const XSD = this.rdf.Namespace(this.namespaces.XSD); - // Get the pid of this package - depends on whether we are updating or creating a resource map + // Get the pid of this package - depends on whether we are updating or + // creating a resource map const pid = this.packageModel.get("id"); const oldPid = this.packageModel.get("oldPid"); let cnResolveUrl = this.getCnURI(); - // Get a list of the model pids that should be aggregated by this package + // Get a list of the model pids that should be aggregated by this + // package let idsFromModel = []; this.each((packageMember) => { - // If this object isn't done uploading, don't aggregate it. - // Or if it failed to upload, don't aggregate it. - // But if the system metadata failed to update, it can still be aggregated. + // If this object isn't done uploading, don't aggregate it. Or if it + // failed to upload, don't aggregate it. But if the system metadata + // failed to update, it can still be aggregated. 
if ( packageMember.get("uploadStatus") !== "p" || packageMember.get("uploadStatus") !== "e" || @@ -2583,7 +2724,8 @@ define([ this.idsToAggregate = idsFromModel; - // Update the pids in the RDF graph only if we are updating the resource map with a new pid + // Update the pids in the RDF graph only if we are updating the resource + // map with a new pid if (!this.packageModel.isNew()) { // Remove all describes/isDescribedBy statements (they'll be rebuilt) this.dataPackageGraph.removeMany( @@ -2601,7 +2743,8 @@ define([ undefined, ); - // Create variations of the resource map ID using the resolve URL so we can always find it in the RDF graph + // Create variations of the resource map ID using the resolve URL so + // we can always find it in the RDF graph oldPidVariations = [ oldPid, encodeURIComponent(oldPid), @@ -2610,7 +2753,8 @@ define([ this.getURIFromRDF(oldPid), ]; - // Using the isAggregatedBy statements, find all the DataONE object ids in the RDF graph + // Using the isAggregatedBy statements, find all the DataONE object + // ids in the RDF graph const idsFromXML = []; const identifierStatements = this.dataPackageGraph.statementsMatching( @@ -2637,7 +2781,8 @@ define([ idsFromModel = _.union(idsFromModel, Object.keys(childPackages)); } - // Find the difference between the model IDs and the XML IDs to get a list of added members + // Find the difference between the model IDs and the XML IDs to get a + // list of added members const addedIds = _.without( _.difference(idsFromModel, idsFromXML), oldPidVariations, @@ -2663,7 +2808,8 @@ define([ idNode, ); - // Change all the resource map identifier literal node in the RDF graph + // Change all the resource map identifier literal node in the RDF + // graph if (idStatements.length) { const idStatement = idStatements[0]; @@ -2708,14 +2854,16 @@ define([ ), ); - // Remove any other isAggregatedBy statements that are not listed as members of this model + // Remove any other isAggregatedBy statements that are not listed as + // members of this model aggByStatements.forEach((statement) => { if (!_.contains(allMemberIds, statement.subject.value)) { this.removeFromAggregation(statement.subject.value); } }); - // Change all the statements in the RDF where the aggregation is the subject, to reflect the new resource map ID + // Change all the statements in the RDF where the aggregation is the + // subject, to reflect the new resource map ID let aggregationNode; oldPidVariations.forEach((oldPidVar) => { // Create a node for the old aggregation using this pid variation @@ -2744,7 +2892,8 @@ define([ // Set the subject value to the new aggregation id subjectClone.value = `${this.getURIFromRDF(pid)}#aggregation`; - // Add a new statement with the new aggregation subject but the same predicate and object + // Add a new statement with the new aggregation subject but the + // same predicate and object this.dataPackageGraph.add( subjectClone, predicateClone, @@ -2756,7 +2905,8 @@ define([ this.dataPackageGraph.removeMany(aggregationNode); } - // Change all the statements in the RDF where the aggregation is the object, to reflect the new resource map ID + // Change all the statements in the RDF where the aggregation is the + // object, to reflect the new resource map ID const aggregationObjStatements = _.union( this.dataPackageGraph.statementsMatching( undefined, @@ -2780,7 +2930,8 @@ define([ // Set the object to the new aggregation pid objectClone.value = `${this.getURIFromRDF(pid)}#aggregation`; - // Add the statement with the old subject and predicate but 
new aggregation object + // Add the statement with the old subject and predicate but new + // aggregation object this.dataPackageGraph.add( subjectClone, predicateClone, @@ -2821,7 +2972,8 @@ define([ // Remove the old resource map statement this.dataPackageGraph.remove(statement); - // Add the statement with the new subject pid, but the same predicate and object + // Add the statement with the new subject pid, but the same + // predicate and object this.dataPackageGraph.add( subjectClone, predicateClone, @@ -2909,8 +3061,8 @@ define([ // Remove any references to blank nodes not already cleaned up. // rdflib.js will fail to serialize an IndexedFormula (graph) with - // statements whose object is a blank node when the blank node - // is not the subject of any other statements. + // statements whose object is a blank node when the blank node is not + // the subject of any other statements. this.removeOrphanedBlankNodes(); const xmlString = serializer.statementsToXML( @@ -2931,7 +3083,8 @@ define([ case "NamedNode": return this.rdf.sym(nodeToClone.value); case "Literal": - // Check for the datatype for this literal value, e.g. http://www.w3.org/2001/XMLSchema#string" + // Check for the datatype for this literal value, e.g. + // http://www.w3.org/2001/XMLSchema#string" if (typeof nodeToClone.datatype !== "undefined") { return this.rdf.literal( nodeToClone.value, @@ -2947,12 +3100,16 @@ define([ // TODO: construct a list of nodes for this term type. return this.rdf.list(nodeToClone.value); default: - // TODO: Handle error `unknown node type to clone: ${nodeToClone.termType}` + // TODO: Handle error `unknown node type to clone: + // ${nodeToClone.termType}` return null; } }, - // Adds a new object to the resource map RDF graph + /** + * Adds a new object to the resource map RDF graph + * @param {string} id - The identifier of the object to add + */ addToAggregation(id) { // Initialize the namespaces const ORE = this.rdf.Namespace(this.namespaces.ORE); @@ -2960,7 +3117,8 @@ define([ const XSD = this.rdf.Namespace(this.namespaces.XSD); const CITO = this.rdf.Namespace(this.namespaces.CITO); - // Create a node for this object, the identifier, the resource map, and the aggregation + // Create a node for this object, the identifier, the resource map, and + // the aggregation const objectNode = this.rdf.sym(this.getURIFromRDF(id)); const rMapURI = this.getURIFromRDF(this.packageModel.get("id")); this.rdf.sym(rMapURI); @@ -2972,7 +3130,8 @@ define([ let documentsStatements = []; let isDocumentedByStatements = []; - // Add the statement: this object isAggregatedBy the resource map aggregation + // Add the statement: this object isAggregatedBy the resource map + // aggregation aggByStatements = this.dataPackageGraph.statementsMatching( objectNode, ORE("isAggregatedBy"), @@ -2982,7 +3141,8 @@ define([ this.dataPackageGraph.add(objectNode, ORE("isAggregatedBy"), aggNode); } - // Add the statement: The resource map aggregation aggregates this object + // Add the statement: The resource map aggregation aggregates this + // object aggStatements = this.dataPackageGraph.statementsMatching( aggNode, ORE("aggregates"), @@ -2992,7 +3152,8 @@ define([ this.dataPackageGraph.add(aggNode, ORE("aggregates"), objectNode); } - // Add the statement: This object has the identifier {id} if it isn't present + // Add the statement: This object has the identifier {id} if it isn't + // present idStatements = this.dataPackageGraph.statementsMatching( objectNode, DCTERMS("identifier"), @@ -3007,7 +3168,8 @@ define([ const isDocBy 
= model.get("isDocumentedBy"); const documents = model.get("documents"); - // Deal with Solr indexing bug where metadata-only packages must "document" themselves + // Deal with Solr indexing bug where metadata-only packages must + // "document" themselves if (isDocBy.length === 0 && documents.length === 0) { documents.push(model.get("id")); } @@ -3025,14 +3187,16 @@ define([ m.get("id"), ); - // Find the metadata IDs that are in this package that also documents this data object + // Find the metadata IDs that are in this package that also documents + // this data object let metadataIds = Array.isArray(isDocBy) ? _.intersection(metadataInPackageIDs, isDocBy) : _.intersection(metadataInPackageIDs, [isDocBy]); - // If this data object is not documented by one of these metadata docs, - // then we should check if it's documented by an obsoleted pid. If so, - // we'll want to change that so it's documented by a current metadata. + // If this data object is not documented by one of these metadata + // docs, then we should check if it's documented by an obsoleted pid. + // If so, we'll want to change that so it's documented by a current + // metadata. if (!metadataIds.length) { for (let i = 0; i < metadataInPackage.length; i += 1) { // If the previous version of this metadata documents this data, @@ -3046,7 +3210,8 @@ define([ } } - // For each metadata that documents this object, add a CITO:isDocumentedBy and CITO:documents statement + // For each metadata that documents this object, add a + // CITO:isDocumentedBy and CITO:documents statement metadataIds.forEach((metaId) => { // Create the named nodes and statements const dataNode = this.rdf.sym(this.getURIFromRDF(id)); @@ -3093,7 +3258,8 @@ define([ // Find the identifier statement for this data object const dataURI = this.getURIFromRDF(dataID); - // Create a data node using the exact way the identifier URI is written + // Create a data node using the exact way the identifier URI is + // written const dataNode = this.rdf.sym(dataURI); // Get the statements for data isDocumentedBy metadata @@ -3139,8 +3305,9 @@ define([ } }, - /* + /** * Removes an object from the aggregation in the RDF graph + * @param {string} id - The identifier of the object to remove */ removeFromAggregation(id) { let identifier = id; @@ -3151,7 +3318,8 @@ define([ // Create a literal node for the removed object const removedObjNode = this.rdf.sym(identifier); - // Get the statements from the RDF where the removed object is the subject or object + // Get the statements from the RDF where the removed object is the + // subject or object const statements = $.extend( true, [], @@ -3174,11 +3342,12 @@ define([ }, /** - * Finds the given identifier in the RDF graph and returns the subject - * URI of that statement. This is useful when adding additional statements - * to the RDF graph for an object that already exists in that graph. + * Finds the given identifier in the RDF graph and returns the subject URI + * of that statement. This is useful when adding additional statements to + * the RDF graph for an object that already exists in that graph. * @param {string} id - The identifier to search for - * @returns {string} - The full URI for the given id as it exists in the RDF. + * @returns {string} - The full URI for the given id as it exists in the + * RDF. 
*/ getURIFromRDF(id) { // Exit if no id was given @@ -3214,7 +3383,8 @@ define([ return this.dataPackageGraph.cnResolveUrl; } if (this.packageModel.get("oldPid")) { - // Find the identifier statement for the resource map in the RDF graph + // Find the identifier statement for the resource map in the RDF + // graph const idNode = this.rdf.lit(this.packageModel.get("oldPid")); const idStatements = this.dataPackageGraph.statementsMatching( undefined, @@ -3264,8 +3434,8 @@ define([ ) return true; - // If the provenance relationships have been updated, then the resource map - // needs to be updated. + // If the provenance relationships have been updated, then the resource + // map needs to be updated. if (this.provEdits.length) return true; // Check for changes to the isDocumentedBy relationships let isDifferent = false; @@ -3273,7 +3443,8 @@ define([ // Keep going until we find a difference while (!isDifferent && i < this.length) { - // Get the original isDocBy relationships from the resource map, and the new isDocBy relationships from the models + // Get the original isDocBy relationships from the resource map, and + // the new isDocBy relationships from the models let isDocBy = this.models[i].get("isDocumentedBy"); const id = this.models[i].get("id"); let origIsDocBy = this.originalIsDocBy[id]; @@ -3290,7 +3461,8 @@ define([ ), ); - // Remove the id of this object so metadata can not be "isDocumentedBy" itself + // Remove the id of this object so metadata can not be + // "isDocumentedBy" itself isDocBy = _.without(isDocBy, id); origIsDocBy = _.without(origIsDocBy, id); @@ -3312,8 +3484,10 @@ define([ return isDifferent; }, - /* - * Returns an array of the models that are in the queue or in progress of uploading + /** + * Gets objects not yet uploaded to the DataONE server + * @returns {Array} An array of models that are in the queue or in + * progress of uploading */ getQueue() { return this.filter( @@ -3321,8 +3495,9 @@ define([ ); }, - /* - * Adds a DataONEObject model to this DataPackage collection + /** + * Adds a DataONEObject model to this DataPackage collection + * @param {DataONEObject} model - The DataONEObject model to add */ addNewModel(model) { // Check that this collection doesn't already contain this model @@ -3335,6 +3510,11 @@ define([ } }, + /** + * Actions ot perform when a DataONEObject model is added to this + * collection + * @param {DataONEObject} dataONEObject - The DataONEObject model that was added + */ handleAdd(dataONEObject) { const metadataModel = this.find((m) => m.get("type") === "Metadata"); @@ -3347,7 +3527,8 @@ define([ ) metadataModel.get("documents").push(dataONEObject.id); - // Create an EML Entity for this DataONE Object if there isn't one already + // Create an EML Entity for this DataONE Object if there isn't one + // already if ( metadataModel.type === "EML" && !dataONEObject.get("metadataEntity") && @@ -3361,46 +3542,19 @@ define([ this.saveReference(dataONEObject); this.setLoadingFiles(dataONEObject); - - // Save a reference to this DataPackage - // If the collections attribute is an array - /* if( Array.isArray(dataONEObject.get("collections")) ){ - //Add this DataPackage to the collections list if it's not already in the array - if( !_.contains(dataONEObject.get("collections"), this) ){ - dataONEObject.get("collections").push(this); - } - } - //If the collections attribute is not an array but there is a value, - else if(dataONEObject.get("collections")){ - - //And if the value is not this DataPackage or it's pid, then set it on the model - if( 
dataONEObject.get("collections") !== this && dataONEObject.get("collections") !== this.get("id") ){ - dataONEObject.set("collections", [dataONEObject.get("collections"), this] ); - } - //Otherwise, set the collections attribute to this DataPackage in an array - else { - dataONEObject.set("collections", [this]); - } - - } - // If there is no value set on the collections attribute, then set it to - // this DataPackage in an array - else{ - dataONEObject.set("collections", [this]); - } -*/ }, /** - * Fetches this DataPackage from the Solr index by using a SolrResults collection - * and merging the models in. + * Fetches this DataPackage from the Solr index by using a SolrResults + * collection and merging the models in. */ fetchFromIndex() { if (typeof this.solrResults === "undefined" || !this.solrResults) { this.solrResults = new SolrResults(); } - // If no query is set yet, use the FilterModel associated with this DataPackage + // If no query is set yet, use the FilterModel associated with this + // DataPackage if (!this.solrResults.currentquery.length) { this.solrResults.currentquery = this.filterModel.getQuery(); } @@ -3417,14 +3571,18 @@ define([ }, /** - * Merge the attributes of other models into the corresponding models in this collection. - * This should be used when merging models of other types (e.g. SolrResult) that represent the same - * object that the DataONEObject models in the collection represent. - * @param {Backbone.Model[]} otherModels - the other models to merge with the models in this collection - * @param {string[]} [fieldsToMerge] - If specified, only these fields will be extracted from the otherModels + * Merge the attributes of other models into the corresponding models in + * this collection. This should be used when merging models of other types + * (e.g. SolrResult) that represent the same object that the DataONEObject + * models in the collection represent. 
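+       * @example
+       * // Hypothetical usage; `dataPackage` and `solrResults` are assumed to
+       * // already exist and to contain models with matching ids. Only the
+       * // named fields are copied from the Solr models.
+       * dataPackage.mergeModels(solrResults.models, ["formatId", "fileName"]);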
+ * @param {Backbone.Model[]} otherModels - the other models to merge with + * the models in this collection + * @param {string[]} [fieldsToMerge] - If specified, only these fields + * will be extracted from the otherModels */ mergeModels(otherModels, fieldsToMerge) { - // If no otherModels are given, exit the function since there is nothing to merge + // If no otherModels are given, exit the function since there is nothing + // to merge if ( typeof otherModels === "undefined" || !otherModels || @@ -3434,7 +3592,8 @@ define([ } otherModels.forEach((otherModel) => { - // Get the model from this collection that matches ids with the other model + // Get the model from this collection that matches ids with the other + // model const modelInDataPackage = this.findWhere({ id: otherModel.get("id"), }); @@ -3443,7 +3602,8 @@ define([ if (modelInDataPackage) { let valuesFromOtherModel; - // If specific fields to merge are given, get the values for those from the other model + // If specific fields to merge are given, get the values for those + // from the other model if (fieldsToMerge && fieldsToMerge.length) { valuesFromOtherModel = _.pick(otherModel.toJSON(), fieldsToMerge); } @@ -3457,8 +3617,9 @@ define([ const omitKeys = []; _.each(otherModelAttr, (val, key) => { - // If this model's attribute is the default, don't set it on our DataONEObject model - // because whatever value is in the DataONEObject model is better information than the default + // If this model's attribute is the default, don't set it on our + // DataONEObject model because whatever value is in the + // DataONEObject model is better information than the default // value of the other model. if (otherModelDefaults[key] === val) omitKeys.push(key); }); @@ -3467,15 +3628,14 @@ define([ valuesFromOtherModel = _.omit(otherModelAttr, omitKeys); } - // Set the values from the other model on the model in this collection + // Set the values from the other model on the model in this + // collection modelInDataPackage.set(valuesFromOtherModel); } }); }, - /** - * Update the relationships in this resource map when its been udpated - */ + /** Update the relationships in this resource map when its been udpated */ updateRelationships() { // Get the old id const oldId = this.packageModel.get("oldPid"); @@ -3491,8 +3651,11 @@ define([ }, this); }, + /** + * Save a reference to this collection in the model + * @param {DataONEObject} model - The model to save a reference to + */ saveReference(model) { - // Save a reference to this collection in the model const currentCollections = model.get("collections"); if (currentCollections.length > 0) { currentCollections.push(this); @@ -3503,13 +3666,12 @@ define([ /** * Broadcast an accessPolicy across members of this package * - * Note: Currently just sets the incoming accessPolicy on this - * object and doesn't broadcast to other members (such as data). - * How this works is likely to change in the future. + * Note: Currently just sets the incoming accessPolicy on this object and + * doesn't broadcast to other members (such as data). How this works is + * likely to change in the future. * * Closely tied to the AccessPolicyView.broadcast property. 
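+       * @example
+       * // Hypothetical sketch; `dataPackage` is an existing, already-saved
+       * // DataPackage and `accessPolicy` is an AccessPolicy built elsewhere
+       * // (e.g. by an AccessPolicyView). A clone of the policy is set on the
+       * // package model and a system metadata update is attempted.
+       * dataPackage.broadcastAccessPolicy(accessPolicy);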
- * @param {AccessPolicy} accessPolicy - The accessPolicy to - * broadcast + * @param {AccessPolicy} accessPolicy - The accessPolicy to broadcast */ broadcastAccessPolicy(accessPolicy) { if (!accessPolicy) { @@ -3519,22 +3681,22 @@ define([ const policy = _.clone(accessPolicy); this.packageModel.set("accessPolicy", policy); - // Stop now if the package is new because we don't want force - // a save just yet + // Stop now if the package is new because we don't want force a save + // just yet if (this.packageModel.isNew()) { return; } this.packageModel.on("sysMetaUpdateError", (_e) => { - // Show a generic error. Any errors at this point are things the - // user can't really recover from. i.e., we've already checked - // that the user has changePermission perms and we've already - // re-tried the request a few times + // Show a generic error. Any errors at this point are things the user + // can't really recover from. i.e., we've already checked that the + // user has changePermission perms and we've already re-tried the + // request a few times const message = "There was an error sharing your dataset. Not all of your changes were applied."; - // TODO: Is this really the right way to hook into the editor's - // error notification mechanism? + // TODO: Is this really the right way to hook into the editor's error + // notification mechanism? MetacatUI.appView.eml211EditorView.saveError(message); }); @@ -3542,10 +3704,12 @@ define([ }, /** - * Tracks the upload status of DataONEObject models in this collection. If they are - * `loading` into the DOM or `in progress` of an upload to the server, they will be considered as "loading" files. - * @param {DataONEObject} [dataONEObject] - A model to begin tracking. Optional. If no DataONEObject is given, then only - * the number of loading files will be calcualted and set on the packageModel. + * Tracks the upload status of DataONEObject models in this collection. If + * they are `loading` into the DOM or `in progress` of an upload to the + * server, they will be considered as "loading" files. + * @param {DataONEObject} [dataONEObject] - A model to begin tracking. + * Optional. If no DataONEObject is given, then only the number of loading + * files will be calcualted and set on the packageModel. * @since 2.17.1 */ setLoadingFiles(dataONEObject) { @@ -3568,7 +3732,8 @@ define([ this.where({ uploadStatus: "l" }).length + this.where({ uploadStatus: "p" }).length; - // If all models in this DataPackage have finished loading, then mark the loading as complete + // If all models in this DataPackage have finished loading, then + // mark the loading as complete if (!newNumLoadingFiles) { this.packageModel.set({ isLoadingFiles: false, @@ -3583,9 +3748,10 @@ define([ }, /** - * Returns atLocation information found in this resourceMap - * for all the PIDs in this resourceMap - * @returns {object} - object with PIDs as key and atLocation paths as values + * Returns atLocation information found in this resourceMap for all the + * PIDs in this resourceMap + * @returns {object} - object with PIDs as key and atLocation paths as + * values * @since 2.28.0 */ getAtLocation() { @@ -3593,10 +3759,12 @@ define([ }, /** - * Get the absolute path from a relative path, handling '~', '..', and '.'. - * @param {string} relativePath - The relative path to be converted to an absolute path. - * @returns {string} - The absolute path after processing '~', '..', and '.'. - * If the result is empty, returns '/'. 
+       * Get the absolute path from a relative path, handling '~', '..', and
+       * '.'.
+       * @param {string} relativePath - The relative path to be converted to an
+       * absolute path.
+       * @returns {string} The absolute path after processing '~', '..', and
+       * '.'. If the result is empty, returns '/'.
       * @since 2.28.0
       */
      getAbsolutePath(relativePath) {
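+        // Illustrative examples of the intended behavior (assumed from the
+        // description above, not taken from the implementation):
+        //   getAbsolutePath("./data/./file.csv")  -> "/data/file.csv"
+        //   getAbsolutePath("data/../readme.md")  -> "/readme.md"
+        //   getAbsolutePath(".")                  -> "/"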