diff --git a/build.gradle b/build.gradle index 7c2cdc65e..2c18d5340 100644 --- a/build.gradle +++ b/build.gradle @@ -20,7 +20,7 @@ plugins { id "com.gorylenko.gradle-git-properties" version "2.4.1" } -version "5.1-PWA-SNAPSHOT" +version "5.1-SNAPSHOT" group "au.org.ala" description "Ecodata" diff --git a/grails-app/conf/application.groovy b/grails-app/conf/application.groovy index c1b58c3c8..46115fef2 100644 --- a/grails-app/conf/application.groovy +++ b/grails-app/conf/application.groovy @@ -496,7 +496,24 @@ app { } checkForBoundaryIntersectionInLayers = [ "cl927", "cl11163" ] } - displayNames = [elect: "Electorate(s)", state: "State(s)"] + displayNames = [ + elect: [ + headerName: "Electorate(s)" + ], + state: [ + headerName: "State(s)", + mappings: [ + "Northern Territory": ["Northern Territory (including Coastal Waters)", "NT"], + "Tasmania": ["Tasmania (including Coastal Waters)", "TAS"], + "New South Wales": ["New South Wales (including Coastal Waters)", "NSW"], + "Victoria": ["Victoria (including Coastal Waters)", "VIC"], + "Queensland": ["Queensland (including Coastal Waters)", "QLD"], + "South Australia": ["South Australia (including Coastal Waters)", "SA"], + "Australian Capital Territory": ["ACT"], + "Western Australia": ["Western Australia (including Coastal Waters)", "WA"] + ] + ] + ] } } /******************************************************************************\ diff --git a/grails-app/conf/data/mapping.json b/grails-app/conf/data/mapping.json index 93c7b44ae..24d20c6f7 100644 --- a/grails-app/conf/data/mapping.json +++ b/grails-app/conf/data/mapping.json @@ -70,10 +70,6 @@ "organisationId": { "type" : "keyword" }, - "orgIdSvcProvider": { - "type" : "keyword", - "copy_to": ["organisationId"] - }, "organisationName": { "type" : "text", "copy_to": ["organisationFacet", "organisationSort"] diff --git a/grails-app/controllers/au/org/ala/ecodata/ProjectController.groovy b/grails-app/controllers/au/org/ala/ecodata/ProjectController.groovy index 
3b6680a33..22c77d8c2 100644 --- a/grails-app/controllers/au/org/ala/ecodata/ProjectController.groovy +++ b/grails-app/controllers/au/org/ala/ecodata/ProjectController.groovy @@ -324,6 +324,15 @@ class ProjectController { render result as JSON } + def findStateAndElectorateForProject() { + if (!params.projectId) { + render status:400, text: "projectId is a required parameter" + } else { + Map project = projectService.get(params.projectId) + asJson projectService.findStateAndElectorateForProject(project) + } + } + def findByName() { if (!params.projectName) { render status:400, text: "projectName is a required parameter" diff --git a/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy b/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy new file mode 100644 index 000000000..39fe0c520 --- /dev/null +++ b/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy @@ -0,0 +1,134 @@ +package au.org.ala.ecodata + +import au.org.ala.ecodata.spatial.SpatialConversionUtils +import au.org.ala.ecodata.spatial.SpatialUtils +import org.apache.commons.fileupload.servlet.ServletFileUpload +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.tuple.Pair +import org.locationtech.jts.geom.Geometry +import org.springframework.web.multipart.MultipartFile + +import javax.servlet.http.HttpServletResponse +@au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.readScope"]) +class SpatialController { + SpatialService spatialService + static responseFormats = ['json', 'xml'] + static allowedMethods = [uploadShapeFile: "POST", getShapeFileFeatureGeoJson: "GET"] + + @au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.writeScope"]) + def uploadShapeFile() { + // Use linked hash map to maintain key ordering + Map retMap = new LinkedHashMap() + + File tmpZipFile = File.createTempFile("shpUpload", ".zip") + + if (ServletFileUpload.isMultipartContent(request)) { + // Parse the request + Map items = request.getFileMap() + + 
if (items.size() == 1) { + MultipartFile fileItem = items.values()[0] + IOUtils.copy(fileItem.getInputStream(), new FileOutputStream(tmpZipFile)) + retMap.putAll(handleZippedShapeFile(tmpZipFile)) + response.setStatus(HttpServletResponse.SC_OK) + } else { + response.setStatus(HttpServletResponse.SC_BAD_REQUEST) + retMap.put("error", "Multiple files sent in request. A single zipped shape file should be supplied.") + } + } + + respond retMap + } + + @au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.writeScope"]) + def getShapeFileFeatureGeoJson() { + Map retMap + String shapeId = params.shapeFileId + String featureIndex = params.featureId + if (featureIndex != null && shapeId != null) { + + retMap = processShapeFileFeatureRequest(shapeId, featureIndex) + if(retMap.geoJson == null) { + response.setStatus(HttpServletResponse.SC_BAD_REQUEST) + } + else { + response.setStatus(HttpServletResponse.SC_OK) + } + } + else { + response.setStatus(HttpServletResponse.SC_BAD_REQUEST) + retMap = ["error": "featureId and shapeFileId must be supplied"] + } + + respond retMap + } + + def features() { + def retVariable + if (!params.layerId) { + response.setStatus(HttpServletResponse.SC_BAD_REQUEST) + retVariable = ["error": "layerId must be supplied"] + } + else { + List intersectWith = params.intersectWith?.split(",") ?: [] + retVariable = spatialService.features(params.layerId, intersectWith) + } + + respond retVariable + } + + private Map processShapeFileFeatureRequest(String shapeFileId, String featureIndex) { + Map retMap = new HashMap() + + try { + File shpFileDir = new File(System.getProperty("java.io.tmpdir"), shapeFileId) + Geometry geoJson = SpatialUtils.getShapeFileFeaturesAsGeometry(shpFileDir, featureIndex) + + if (geoJson == null) { + retMap.put("error", "Invalid geometry") + return retMap + } + else { + if (geoJson.getCoordinates().flatten().size() > grailsApplication.config.getProperty("shapefile.simplify.threshhold", Integer, 50_000)) { + geoJson = 
GeometryUtils.simplify(geoJson, grailsApplication.config.getProperty("shapefile.simplify.tolerance", Double, 0.0001)) + } + + retMap.put("geoJson", GeometryUtils.geometryToGeoJsonMap(geoJson, grailsApplication.config.getProperty("shapefile.geojson.decimal", Integer, 20))) + } + } catch (Exception ex) { + log.error("Error processsing shapefile feature request", ex) + retMap.put("error", ex.getMessage()) + } + + return retMap + } + + private static Map handleZippedShapeFile(File zippedShp) throws IOException { + // Use linked hash map to maintain key ordering + Map retMap = new LinkedHashMap() + + Pair idFilePair = SpatialConversionUtils.extractZippedShapeFile(zippedShp) + String uploadedShpId = idFilePair.getLeft() + File shpFile = idFilePair.getRight() + + retMap.put("shp_id", uploadedShpId) + + List>> manifestData = SpatialConversionUtils.getShapeFileManifest(shpFile) + + int featureIndex = 0 + for (List> featureData : manifestData) { + // Use linked hash map to maintain key ordering + Map featureDataMap = new LinkedHashMap() + + for (Pair fieldData : featureData) { + featureDataMap.put(fieldData.getLeft(), fieldData.getRight()) + } + + retMap.put(featureIndex, featureDataMap) + + featureIndex++ + } + + return retMap + } +} diff --git a/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy b/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy index 4e5d91d5d..0c93fd480 100644 --- a/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy +++ b/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy @@ -42,6 +42,9 @@ class UrlMappings { "/ws/output/getOutputSpeciesUUID/"(controller: "output"){ action = [GET:"getOutputSpeciesUUID"] } + "/ws/shapefile" (controller: "spatial"){ action = [POST:"uploadShapeFile"] } + "/ws/shapefile/geojson/$shapeFileId/$featureId"(controller: "spatial"){ action = [GET:"getShapeFileFeatureGeoJson"] } + "/ws/activitiesForProject/$id" { controller = 'activity' action = 'activitiesForProject' @@ -195,6 +198,7 @@ 
class UrlMappings { "/ws/project/getBiocollectFacets"(controller: "project"){ action = [GET:"getBiocollectFacets"] } "/ws/project/getDefaultFacets"(controller: "project", action: "getDefaultFacets") "/ws/project/$projectId/dataSet/$dataSetId/records"(controller: "project", action: "fetchDataSetRecords") + "/ws/project/findStateAndElectorateForProject"(controller: "project", action: "findStateAndElectorateForProject") "/ws/admin/initiateSpeciesRematch"(controller: "admin", action: "initiateSpeciesRematch") "/ws/dataSetSummary/$projectId/$dataSetId?"(controller :'dataSetSummary') { diff --git a/grails-app/domain/au/org/ala/ecodata/AssociatedOrg.groovy b/grails-app/domain/au/org/ala/ecodata/AssociatedOrg.groovy index eebb4855f..f052ad682 100644 --- a/grails-app/domain/au/org/ala/ecodata/AssociatedOrg.groovy +++ b/grails-app/domain/au/org/ala/ecodata/AssociatedOrg.groovy @@ -10,11 +10,29 @@ import groovy.transform.ToString @JsonIgnoreProperties(['metaClass', 'errors', 'expandoMetaClass']) class AssociatedOrg { + /** Reference to the Organisation entity if ecodata has a record of the Organisation */ String organisationId + + /** + * The name of the organisation in the context of the relationship. e.g. it could be a name used + * in a contract with a project that is different from the current business name of the organisation + */ String name String logo String url + /** + * The date the association started. A null date indicates the relationship started at the same + * time as the related entity. e.g. the start of a Project + */ + Date fromDate + + /** + * The date the association ended. A null date indicates the relationship ended at the same + * time as the related entity. e.g. the end of a Project + */ + Date toDate + + /** A description of the association - e.g. 
Service Provider, Grantee, Sponsor */ String description @@ -25,8 +43,11 @@ class AssociatedOrg { organisationId nullable: true name nullable: true logo nullable: true + url nullable: true description nullable: true + fromDate nullable: true + toDate nullable: true } } diff --git a/grails-app/domain/au/org/ala/ecodata/ExternalId.groovy b/grails-app/domain/au/org/ala/ecodata/ExternalId.groovy index 8ab58aed6..175387554 100644 --- a/grails-app/domain/au/org/ala/ecodata/ExternalId.groovy +++ b/grails-app/domain/au/org/ala/ecodata/ExternalId.groovy @@ -12,7 +12,7 @@ class ExternalId implements Comparable { enum IdType { INTERNAL_ORDER_NUMBER, TECH_ONE_CODE, WORK_ORDER, GRANT_AWARD, GRANT_OPPORTUNITY, RELATED_PROJECT, - MONITOR_PROTOCOL_INTERNAL_ID, MONITOR_PROTOCOL_GUID, TECH_ONE_CONTRACT_NUMBER, MONITOR_PLOT_GUID, + MONITOR_PROTOCOL_INTERNAL_ID, MONITOR_PROTOCOL_GUID, TECH_ONE_CONTRACT_NUMBER, TECH_ONE_PARTY_ID, MONITOR_PLOT_GUID, MONITOR_PLOT_SELECTION_GUID, MONITOR_MINTED_COLLECTION_ID, UNSPECIFIED } static constraints = { diff --git a/grails-app/domain/au/org/ala/ecodata/GeographicInfo.groovy b/grails-app/domain/au/org/ala/ecodata/GeographicInfo.groovy index e12cef264..5a14a7a7d 100644 --- a/grails-app/domain/au/org/ala/ecodata/GeographicInfo.groovy +++ b/grails-app/domain/au/org/ala/ecodata/GeographicInfo.groovy @@ -20,6 +20,12 @@ class GeographicInfo { /** Some projects don't have specific geographic areas and are flagged as being run nationwide */ boolean nationwide = false + /** A flag to indicate that the project is running statewide i.e. 
all electorates in a state */ + boolean statewide = false + + /** A flag to override calculated values for states and electorates with manually entered values */ + boolean isDefault = false + /** The primary state in which this project is running, if applicable */ String primaryState diff --git a/grails-app/domain/au/org/ala/ecodata/Organisation.groovy b/grails-app/domain/au/org/ala/ecodata/Organisation.groovy index 266d37ac1..6b41a63ee 100644 --- a/grails-app/domain/au/org/ala/ecodata/Organisation.groovy +++ b/grails-app/domain/au/org/ala/ecodata/Organisation.groovy @@ -22,17 +22,34 @@ class Organisation { String description String announcements String abn + String url + String abnStatus // N/A, Active, Cancelled + String entityName + String sourceSystem // MERIT or Collectory + String entityType // Type code from the ABN register + String orgType // Type name as selected in BioCollect/ Name from the ABN register + List businessNames + String state + Integer postcode + List externalIds // For financial system vendor codes/reference + List indigenousOrganisationRegistration + List associatedOrgs // e.g. 
parent organisation such as for NSW LLS group + List contractNames // When contracts are written for projects with this organisation with a name that doesn't match the organisation name + String status = Status.ACTIVE - String status = 'active' + /** Stores configuration information for how reports should be generated for this organisation (if applicable) */ + Map config String collectoryInstitutionId // Reference to the Collectory Date dateCreated Date lastUpdated + static embedded = ['externalIds', 'associatedOrgs'] static mapping = { organisationId index: true + name index:true version false } @@ -42,7 +59,21 @@ class Organisation { announcements nullable: true description nullable: true collectoryInstitutionId nullable: true + abnStatus nullable: true + entityName nullable: true + entityType nullable: true + orgType nullable: true + businessNames nullable: true + contractNames nullable: true + state nullable: true + postcode nullable: true + indigenousOrganisationRegistration nullable: true + associatedOrgs nullable: true abn nullable: true + url nullable: true + config nullable: true + sourceSystem nullable: true + externalIds nullable: true hubId nullable: true, validator: { String hubId, Organisation organisation, Errors errors -> GormMongoUtil.validateWriteOnceProperty(organisation, 'organisationId', 'hubId', errors) } diff --git a/grails-app/services/au/org/ala/ecodata/OrganisationService.groovy b/grails-app/services/au/org/ala/ecodata/OrganisationService.groovy index 2fe0eb635..a6031f1b1 100644 --- a/grails-app/services/au/org/ala/ecodata/OrganisationService.groovy +++ b/grails-app/services/au/org/ala/ecodata/OrganisationService.groovy @@ -3,6 +3,7 @@ package au.org.ala.ecodata import com.mongodb.client.MongoCollection import com.mongodb.client.model.Filters import grails.validation.ValidationException +import grails.web.databinding.DataBinder import org.bson.conversions.Bson import static au.org.ala.ecodata.Status.DELETED @@ -10,11 +11,13 @@ import 
static au.org.ala.ecodata.Status.DELETED /** * Works with Organisations, mostly CRUD operations at this point. */ -class OrganisationService { +class OrganisationService implements DataBinder { /** Use to include related projects in the toMap method */ public static final String PROJECTS = 'projects' + private static final List EXCLUDE_FROM_BINDING = ['organisationId', 'collectoryInstitutionId', 'status', 'id'] + static transactional = 'mongo' static final FLAT = 'flat' @@ -40,10 +43,10 @@ class OrganisationService { } def list(levelOfDetail = []) { - return Organisation.findAllByStatusNotEqual('deleted').collect{toMap(it, levelOfDetail)} + return Organisation.findAllByStatusNotEqual(DELETED).collect{toMap(it, levelOfDetail)} } - def create(Map props, boolean createInCollectory = true) { + def create(Map props, boolean createInCollectory = false) { def organisation = new Organisation(organisationId: Identifiers.getNew(true, ''), name:props.name) @@ -51,12 +54,8 @@ class OrganisationService { organisation.collectoryInstitutionId = createCollectoryInstitution(props) } try { - // name is a mandatory property and hence needs to be set before dynamic properties are used (as they trigger validations) + bindData(organisation, props, [exclude:EXCLUDE_FROM_BINDING]) organisation.save(failOnError: true, flush:true) - props.remove('id') - props.remove('organisationId') - props.remove('collectoryInstitutionId') - commonService.updateProperties(organisation, props) // Assign the creating user as an admin. 
permissionService.addUserAsRoleToOrganisation(userService.getCurrentUserDetails()?.userId, AccessLevel.admin, organisation.organisationId) @@ -91,23 +90,29 @@ return institutionId } - def update(String id, props) { + def update(String id, props, boolean createInCollectory = false) { def organisation = Organisation.findByOrganisationId(id) if (organisation) { try { - String oldName = organisation.name - commonService.updateProperties(organisation, props) // if no collectory institution exists for this organisation, create one - if (!organisation.collectoryInstitutionId || organisation.collectoryInstitutionId == 'null' || organisation.collectoryInstitutionId == '') { - props.collectoryInstitutionId = createCollectoryInstitution(props) + // We shouldn't be doing this unless the org is attached to a project that exports data + // to the ALA. + if (createInCollectory && (!organisation.collectoryInstitutionId || organisation.collectoryInstitutionId == 'null' || organisation.collectoryInstitutionId == '')) { + organisation.collectoryInstitutionId = createCollectoryInstitution(props) } + String oldName = organisation.name + List contractNameChanges = props.remove('contractNameChanges') + bindData(organisation, props, [exclude:EXCLUDE_FROM_BINDING]) - getCommonService().updateProperties(organisation, props) if (props.name && (oldName != props.name)) { - projectService.updateOrganisationName(organisation.organisationId, props.name) + projectService.updateOrganisationName(organisation.organisationId, oldName, props.name) + } + contractNameChanges?.each { Map change -> + projectService.updateOrganisationName(organisation.organisationId, change.oldName, change.newName) + } + organisation.save(failOnError:true) return [status:'ok'] } catch (Exception e) { Organisation.withSession { session -> session.clear() } @@ -136,7 +142,7 @@ class OrganisationService { if (destroy) { organisation.delete() } else { - organisation.status = 'deleted' + 
organisation.status = DELETED organisation.save(flush: true, failOnError: true) } return [status: 'ok'] @@ -160,7 +166,6 @@ class OrganisationService { if ('projects' in levelOfDetail) { mapOfProperties.projects = [] mapOfProperties.projects += projectService.search([organisationId: org.organisationId], ['flat']) - mapOfProperties.projects += projectService.search([orgIdSvcProvider: org.organisationId], ['flat']) } if ('documents' in levelOfDetail) { mapOfProperties.documents = documentService.findAllByOwner('organisationId', org.organisationId) diff --git a/grails-app/services/au/org/ala/ecodata/ProjectService.groovy b/grails-app/services/au/org/ala/ecodata/ProjectService.groovy index 47af6127f..33919cd94 100644 --- a/grails-app/services/au/org/ala/ecodata/ProjectService.groovy +++ b/grails-app/services/au/org/ala/ecodata/ProjectService.groovy @@ -4,7 +4,6 @@ import au.org.ala.ecodata.converter.SciStarterConverter import grails.converters.JSON import grails.core.GrailsApplication import groovy.json.JsonSlurper -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.context.MessageSource import org.springframework.web.servlet.i18n.SessionLocaleResolver @@ -286,7 +285,9 @@ class ProjectService { if (it.organisationId) { Organisation org = Organisation.findByOrganisationId(it.organisationId) if (org) { - it.name = org.name + if (!it.name) { // Is this going to cause BioCollect an issue? + it.name = org.name + } it.url = org.url it.logo = Document.findByOrganisationIdAndRoleAndStatus(it.organisationId, "logo", ACTIVE)?.thumbnailUrl } @@ -689,15 +690,36 @@ class ProjectService { List search(Map searchCriteria, levelOfDetail = []) { def criteria = Project.createCriteria() + def projects = criteria.list { ne("status", DELETED) searchCriteria.each { prop, value -> + // Special case for organisationId - also included embedded associatedOrg relationships. 
+ if (prop == 'organisationId') { + or { + if (value instanceof List) { + inList(prop, value) + } else { + eq(prop, value) + } - if (value instanceof List) { - inList(prop, value) - } else { - eq(prop, value) + associatedOrgs { + if (value instanceof List) { + inList(prop, value) + } else { + eq(prop, value) + } + } + } + } + else { + if (value instanceof List) { + inList(prop, value) + } else { + eq(prop, value) + } } + } } @@ -721,10 +743,28 @@ class ProjectService { * @param orgId identifies the organsation that has changed name * @param orgName the new organisation name */ - void updateOrganisationName(orgId, orgName) { - Project.findAllByOrganisationIdAndStatusNotEqual(orgId, DELETED).each { project -> - project.organisationName = orgName - project.save() + void updateOrganisationName(String orgId, String oldName, String newName) { + Project.findAllByOrganisationIdAndOrganisationNameAndStatusNotEqual(orgId, oldName, DELETED).each { project -> + project.organisationName = newName + project.save(flush:true) + } + + List projects = Project.where { + status != DELETED + associatedOrgs { + organisationId == orgId + name == oldName + } + }.list() + + + projects?.each { Project project -> + project.associatedOrgs.each { org -> + if (org.organisationId == orgId && org.name == oldName) { + org.name = newName + } + } + project.save(flush:true) } } @@ -1208,6 +1248,55 @@ class ProjectService { [] } + /** + * Find primary/other state(s)/electorate(s) for a project. + * 1. If isDefault is true, use manually assigned state(s)/electorate(s) i.e project.geographicInfo. + * 2. If isDefault is false or missing, use the state(s)/electorate(s) from sites using site precedence. + * 3. If isDefault is false and there are no sites, use manual state(s)/electorate(s) in project.geographicInfo. 
+ */ + Map findStateAndElectorateForProject(Map project) { + Map result = [:] + if(project == null) { + return result + } + + Map geographicInfo = project?.geographicInfo + // isDefault is false or missing + if (geographicInfo == null || (geographicInfo.isDefault == false)) { + Map intersections = orderLayerIntersectionsByAreaOfProjectSites(project) + Map config = metadataService.getGeographicConfig() + List intersectionLayers = config.checkForBoundaryIntersectionInLayers + intersectionLayers?.each { layer -> + Map facetName = metadataService.getGeographicFacetConfig(layer) + if (facetName.name) { + List intersectionValues = intersections[layer] + if (intersectionValues) { + result["primary${facetName.name}"] = intersectionValues.pop() + result["other${facetName.name}"] = intersectionValues.join("; ") + } + } + else + log.error ("No facet config found for layer $layer.") + } + } + + //isDefault is true or false and no sites. + if (geographicInfo) { + // load from manually assigned electorates/states + if (!result.containsKey("primaryelect")) { + result["primaryelect"] = geographicInfo.primaryElectorate + result["otherelect"] = geographicInfo.otherElectorates?.join("; ") + } + + if (!result.containsKey("primarystate")) { + result["primarystate"] = geographicInfo.primaryState + result["otherstate"] = geographicInfo.otherStates?.join("; ") + } + } + + result + } + /** * Returns a distinct list of hubIds for the supplied projects. 
* @param projects diff --git a/grails-app/services/au/org/ala/ecodata/SpatialService.groovy b/grails-app/services/au/org/ala/ecodata/SpatialService.groovy index e48b1af84..cf51f62fb 100644 --- a/grails-app/services/au/org/ala/ecodata/SpatialService.groovy +++ b/grails-app/services/au/org/ala/ecodata/SpatialService.groovy @@ -7,6 +7,9 @@ import groovy.json.JsonSlurper import org.locationtech.jts.geom.* import org.locationtech.jts.io.WKTReader +import java.util.regex.Matcher +import java.util.regex.Pattern + import static ParatooService.deepCopy /** * The SpatialService is responsible for: @@ -26,9 +29,11 @@ class SpatialService { WebService webService MetadataService metadataService + CacheService cacheService GrailsApplication grailsApplication Map lookupTable + Map synonymLookupTable = [:] public SpatialService() { JsonSlurper js = new JsonSlurper() @@ -214,10 +219,12 @@ class SpatialService { Map intersectionAreaByFacets = [:].withDefault { [:] } response?.each { String fid, List matchingObjects -> filteredResponse[fid] = [] + Map facetConfig = metadataService.getGeographicFacetConfig(fid) // check for boundary intersection object for selected layers defined in config. 
if (checkForBoundaryIntersectionInLayers.contains(fid)) { matchingObjects.each { Map obj -> String boundaryPid = obj.pid + String objName = obj.name = standardiseSpatialLayerObjectName(obj.name, facetConfig.name) if (boundaryPid) { log.debug("Intersecting ${obj.fieldname}(${fid}) - ${obj.name} ") // Get geoJSON of the object stored in spatial portal @@ -241,9 +248,9 @@ class SpatialService { if (isValidGeometryIntersection(mainGeometry, boundaryGeometry)) { filteredResponse[fid].add(obj) def (intersectionAreaOfMainGeometry, area) = getIntersectionProportionAndArea(mainGeometry, boundaryGeometry) - intersectionAreaByFacets[fid][obj.name] = area + intersectionAreaByFacets[fid][objName] = area } else { - log.debug("Filtered out ${obj.fieldname}(${fid}) - ${obj.name}") + log.debug("Filtered out ${obj.fieldname}(${fid}) - ${objName}") } end = System.currentTimeMillis() @@ -377,6 +384,102 @@ class SpatialService { GeometryUtils.wktToGeoJson(resp) } + /** + * Fetch spatial layer objects and standardise object names. 
+ * @param layerId + * @return + */ + List features (String layerId, List intersectWith = []) { + cacheService.get("features-${layerId}-intersect-with-${intersectWith.join('')}", { + def resp = webService.getJson("${grailsApplication.config.getProperty('spatial.baseUrl')}/ws/objects/${layerId}") + Map facetName = null + try { + facetName = metadataService.getGeographicFacetConfig(layerId) + if(resp instanceof List) { + resp.sort { it.name } + List objects = resp.collect { obj -> + obj.name = standardiseSpatialLayerObjectName(obj.name, facetName.name) + + intersectWith.each { String fid -> + def intersectedObjects = webService.getJson("${grailsApplication.config.getProperty('spatial.baseUrl')}/ws/intersect/object/${fid}/${obj.pid}") + if (intersectedObjects instanceof List) { + Map facetConfig = metadataService.getGeographicFacetConfig(fid) + intersectedObjects.sort { it.name } + obj[(facetConfig.name)] = obj[fid] = intersectedObjects.collect { standardiseSpatialLayerObjectName(it.name, facetConfig.name) } + } + } + + obj + } + + + return objects + } + } + catch (IllegalArgumentException e) { + log.error("Error getting facet config for layer $layerId") + } + + return [] + }, 365) as List + } + + /** + * Get mapping for a facet from config + * @param facetName + * @return + */ + Map getDisplayNamesForFacet(String facetName) { + Map lookupTable = grailsApplication.config.getProperty('app.facets.displayNames', Map) + if (facetName) { + return lookupTable[facetName]?.mappings ?: [:] + } + } + + /** + * Spatial portal returns the object name in a variety of formats. This function formats the object name to a more + * consistent way. For example, depending on layer used New South Wales is sometimes called "New South Wales (including Coastal Waters)". 
+ * @param name - name of the object + * @param synonymTable - expected data format - ["New South Wales": ["New South Wales (including Coastal Waters)", "NSW"], "Australian Capital Territory": ["ACT"]] + * @return + */ + String standardiseSpatialLayerObjectName(String name, Map synonymTable, String facetName) { + if (name) { + name = name.trim().toLowerCase() + // initialise a Map that stores the inverse of mappings. ["act": "Australian Capital Territory", "nsw": "New South Wales"] + if (synonymLookupTable[facetName] == null) { + synonymLookupTable[facetName] = synonymTable?.collectEntries { k, List v -> v.collectEntries { v1 -> [(v1.toLowerCase()): k] } } + } + + synonymLookupTable[facetName]?.get(name) ?: titleCase(name) + } + } + + String titleCase(String name) { + Pattern pattern = Pattern.compile("\\b\\w") + Matcher matcher = pattern.matcher(name.toLowerCase()) + StringBuffer capitalizedName = new StringBuffer() + + // Capitalize each matched letter and append it to the result + while (matcher.find()) { + matcher.appendReplacement(capitalizedName, matcher.group().toUpperCase()) + } + matcher.appendTail(capitalizedName) + + return capitalizedName.toString() + } + + /** + * Provide a facet name such as "state", "elect" etc. to get standardised object name. + * @param name - object name such as "New South Wales (including Coastal Waters)" + * @param facetName - facet name such as "state", "elect" + * @return + */ + String standardiseSpatialLayerObjectName(String name, String facetName) { + Map lookupTable = getDisplayNamesForFacet(facetName) + standardiseSpatialLayerObjectName(name, lookupTable, facetName) + } + /** * Converts the response from the spatial portal into geographic facets, taking into account the facet * configuration (whether the facet is made up of a single layer or a group of layers). @@ -393,11 +496,11 @@ class SpatialService { // Grouped facets combine multiple layers into a single facet. 
If the site intersects with // any object in the layer, then that layer is added as a matching value to the facet. if (matchingObjects) { - result[facetConfig.name].add(matchingObjects[0].fieldname) + result[facetConfig.name].add(standardiseSpatialLayerObjectName(matchingObjects[0].fieldname as String, facetConfig.name as String)) } } else { - result[facetConfig.name] = matchingObjects.collect{it.name} + result[facetConfig.name] = matchingObjects.collect{standardiseSpatialLayerObjectName(it.name as String, facetConfig.name as String)} } } result diff --git a/package-lock.json b/package-lock.json index 7a7183dec..6158a3e9d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1317,9 +1317,9 @@ } }, "node_modules/cookie": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", - "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", "dev": true, "engines": { "node": ">= 0.6" @@ -1543,9 +1543,9 @@ } }, "node_modules/engine.io": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.5.5.tgz", - "integrity": "sha512-C5Pn8Wk+1vKBoHghJODM63yk8MvrO9EWZUfkAt5HAqIgPE4/8FF0PEGHXtEd40l223+cE5ABWuPzm38PHFXfMA==", + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz", + "integrity": "sha512-gmNvsYi9C8iErnZdVcJnvCpSKbWTt1E8+JZo8b+daLninywUWi5NQ5STSHZ9rFjFO7imNcvb8Pc5pe/wMR5xEw==", "dev": true, "dependencies": { "@types/cookie": "^0.4.1", @@ -1553,7 +1553,7 @@ "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", - "cookie": "~0.4.1", + "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", @@ -1564,9 +1564,9 @@ } }, "node_modules/engine.io-parser": { - 
"version": "5.2.2", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.2.tgz", - "integrity": "sha512-RcyUFKA93/CXH20l4SoVvzZfrSDMOTUS3bWVpTt2FuFP+XYrL8i8oonHP7WInRyVHXh0n/ORtoeiE1os+8qkSw==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", + "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", "dev": true, "engines": { "node": ">=10.0.0" @@ -3673,16 +3673,16 @@ } }, "node_modules/socket.io": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.7.5.tgz", - "integrity": "sha512-DmeAkF6cwM9jSfmp6Dr/5/mfMwb5Z5qRrSXLpo3Fq5SqyU8CMF15jIN4ZhfSwu35ksM1qmHZDQ/DK5XTccSTvA==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.0.tgz", + "integrity": "sha512-8U6BEgGjQOfGz3HHTYaC/L1GaxDCJ/KM0XTkJly0EhZ5U/du9uNEZy4ZgYzEzIqlx2CMm25CrCqr1ck899eLNA==", "dev": true, "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.3.2", - "engine.io": "~6.5.2", + "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" }, @@ -5404,9 +5404,9 @@ } }, "cookie": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", - "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", "dev": true }, "cors": { @@ -5571,9 +5571,9 @@ } }, "engine.io": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.5.5.tgz", - "integrity": "sha512-C5Pn8Wk+1vKBoHghJODM63yk8MvrO9EWZUfkAt5HAqIgPE4/8FF0PEGHXtEd40l223+cE5ABWuPzm38PHFXfMA==", + "version": "6.6.2", + "resolved": 
"https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz", + "integrity": "sha512-gmNvsYi9C8iErnZdVcJnvCpSKbWTt1E8+JZo8b+daLninywUWi5NQ5STSHZ9rFjFO7imNcvb8Pc5pe/wMR5xEw==", "dev": true, "requires": { "@types/cookie": "^0.4.1", @@ -5581,7 +5581,7 @@ "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", - "cookie": "~0.4.1", + "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", @@ -5589,9 +5589,9 @@ } }, "engine.io-parser": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.2.tgz", - "integrity": "sha512-RcyUFKA93/CXH20l4SoVvzZfrSDMOTUS3bWVpTt2FuFP+XYrL8i8oonHP7WInRyVHXh0n/ORtoeiE1os+8qkSw==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", + "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", "dev": true }, "enhanced-resolve": { @@ -7147,16 +7147,16 @@ } }, "socket.io": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.7.5.tgz", - "integrity": "sha512-DmeAkF6cwM9jSfmp6Dr/5/mfMwb5Z5qRrSXLpo3Fq5SqyU8CMF15jIN4ZhfSwu35ksM1qmHZDQ/DK5XTccSTvA==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.0.tgz", + "integrity": "sha512-8U6BEgGjQOfGz3HHTYaC/L1GaxDCJ/KM0XTkJly0EhZ5U/du9uNEZy4ZgYzEzIqlx2CMm25CrCqr1ck899eLNA==", "dev": true, "requires": { "accepts": "~1.3.4", "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.3.2", - "engine.io": "~6.5.2", + "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" } diff --git a/scripts/releases/5.1/updateAssociatedOrgNames.js b/scripts/releases/5.1/updateAssociatedOrgNames.js new file mode 100644 index 000000000..43ed54988 --- /dev/null +++ b/scripts/releases/5.1/updateAssociatedOrgNames.js @@ -0,0 +1,30 @@ +load('../../utils/audit.js'); +let projects = db.project.find({status:{$ne:'deleted'}, 
associatedOrgs:{$exists:true}, isMERIT:false}); +while (projects.hasNext()) { + let changed = false; + + let project = projects.next(); + let associatedOrgs = project.associatedOrgs; + if (associatedOrgs) { + for (let i = 0; i < associatedOrgs.length; i++) { + if (associatedOrgs[i].organisationId) { + let org = db.organisation.findOne({organisationId: associatedOrgs[i].organisationId}); + if (org) { + if (org.name != associatedOrgs[i].name) { + print("Updating associated org for project " + project.projectId + " from " + associatedOrgs[i].name + " to " + org.name); + associatedOrgs[i].name = org.name; + changed = true; + } + } else { + print("No organisation found for associated org " + associatedOrgs[i].organisationId + " in project " + project.projectId); + } + + } + } + if (changed) { + db.project.replaceOne({projectId: project.projectId}, project); + audit(project, project.projectId, 'au.org.ala.ecodata.Project', 'system'); + } + + } +} \ No newline at end of file diff --git a/scripts/releases/5.1/updateSiteLocationMetadata.js b/scripts/releases/5.1/updateSiteLocationMetadata.js new file mode 100644 index 000000000..dc7f98424 --- /dev/null +++ b/scripts/releases/5.1/updateSiteLocationMetadata.js @@ -0,0 +1,176 @@ +load('../../utils/audit.js'); +const intersection = "intersectionAreaByFacets" +let lookupTable = { + "state": { + "Northern Territory": ["Northern Territory (including Coastal Waters)", "NT"], + "Tasmania": ["Tasmania (including Coastal Waters)", "TAS"], + "New South Wales": ["New South Wales (including Coastal Waters)", "NSW"], + "Victoria": ["Victoria (including Coastal Waters)", "VIC"], + "Queensland": ["Queensland (including Coastal Waters)", "QLD"], + "South Australia": ["South Australia (including Coastal Waters)", "SA"], + "Australian Capital Territory": ["ACT"], + "Western Australia": ["Western Australia (including Coastal Waters)", "WA"] + } +} + +const propertiesToStandardize = ["state", "elect"]; +function 
standardiseSpatialLayerObjectName(name, property) { + if (name) { + let lookupTableForProperty = lookupTable[property]; + name = name.trim().toLowerCase(); + if (lookupTableForProperty) { + let manyToOneMappingTable = `${property}Synonym`; + if (!lookupTable[manyToOneMappingTable]) { + const mappings = {}; + const keys= Object.keys(lookupTableForProperty || {}) + for (let i = 0; i < keys.length; i++) { + let key = keys[i]; + let values = lookupTableForProperty[key]; + values.forEach(value => { + mappings[value.toLowerCase()] = key; + }); + } + + lookupTable[manyToOneMappingTable] = mappings; + } + + if (lookupTable[manyToOneMappingTable][name]) + return lookupTable[manyToOneMappingTable][name]; + } + + return name.replace(/\b\w/g, char => char.toUpperCase()); + } + + return null; +} + +/** + * + * @param geometry = { + * "state": ["New South Wales"], + * "elect": "Page" + * } + * @param updated + * @returns {*} + */ +function standardiseFacetValues(geometry, updated) { + propertiesToStandardize.forEach(property => { + let value = geometry[property]; + if (value) { + if (typeof value === "string"){ + value = geometry[property] = [value]; + updated = true; + } + + if (value.length > 0) { + value.forEach((item, index) => { + let standardizedValue = standardiseSpatialLayerObjectName(item, property); + if (standardizedValue !== item) { + value[index] = standardizedValue; + updated = true; + } + }); + } + } + }); + + return updated; +} + +/** + * @param geometry = { + * "state": ["New South Wales"], + * "intersectionAreaByFacets": { + * "state": { + * "CURRENT": { + * "New South Wales": 0 + * }, + * "cl927": { + * "New South Wales": 0 + * } + * }, + * "elect": { + * "CURRENT": { + * "Page": 0 + * }, + * "cl11163": { + * "Page": 0 + * } + * } + * } + * } + * @param updated + * @returns updated + */ +function standardiseIntersectionAreaByFacets(geometry, updated) { + let intersectionAreaByFacets = geometry[intersection]; + if (intersectionAreaByFacets) { + var facets = 
Object.keys(intersectionAreaByFacets); + for (let i = 0; i < facets.length; i++) { + let facet = facets[i]; + let layersSpatialNamesAndArea = intersectionAreaByFacets[facet]; + let layers = Object.keys(layersSpatialNamesAndArea); + for (let j = 0; j < layers.length; j++) { + let layer = layers[j]; + let spatialNamesAndArea = layersSpatialNamesAndArea[layer]; + let spatialNames = Object.keys(spatialNamesAndArea); + let newSpatialValuesAndArea = {}; + for (let k = 0; k < spatialNames.length; k++) { + let newSpatialValue = standardiseSpatialLayerObjectName(spatialNames[k], facet); + newSpatialValuesAndArea[newSpatialValue] = spatialNamesAndArea[spatialNames[k]]; + updated = true; + } + + layersSpatialNamesAndArea[layer] = newSpatialValuesAndArea; + } + } + } + + return updated; +} + +function standardiseGeographicInfo(geographicInfo) { + if (geographicInfo) { + geographicInfo.primaryState = standardiseSpatialLayerObjectName(geographicInfo.primaryState, "state"); + if (geographicInfo.otherStates) { + geographicInfo.otherStates = geographicInfo.otherStates.map(state => standardiseSpatialLayerObjectName(state, "state")); + } + + geographicInfo.primaryElectorate = standardiseSpatialLayerObjectName(geographicInfo.primaryElectorate, "elect"); + if (geographicInfo.otherElectorates) { + geographicInfo.otherElectorates = geographicInfo.otherElectorates.map(elect => standardiseSpatialLayerObjectName(elect, "elect")); + } + } + + return geographicInfo; +} + +db.site.find({"extent.geometry": {$exists: true}}).forEach(site => { + let updated = false; + let geometry = site.extent && site.extent.geometry; + if (geometry) { + updated = standardiseFacetValues(geometry, updated); + updated = standardiseIntersectionAreaByFacets(geometry, updated); + + if (updated) { + print(`Updating site ${site.siteId}`); + db.site.updateOne({siteId: site.siteId}, {$set: {"extent.geometry": geometry}}); + audit(site, site.siteId, 'au.org.ala.ecodata.Site', 'system'); + } + } +}); + +print("Completed 
sites"); + +db.project.find({ + "geographicInfo": {$exists: true} +}).forEach(project => { + print(`Updating project ${project.projectId}`); + if(project.geographicInfo) { + project.geographicInfo = standardiseGeographicInfo(project.geographicInfo); + db.project.updateOne({projectId: project.projectId}, {$set: {"geographicInfo": project.geographicInfo}}); + audit(project, project.projectId, 'au.org.ala.ecodata.Project', 'system'); + } +}); + +print("Completed projects"); \ No newline at end of file diff --git a/src/integration-test/groovy/au/org/ala/ecodata/OrganisationControllerSpec.groovy b/src/integration-test/groovy/au/org/ala/ecodata/OrganisationControllerSpec.groovy index d6ce0b57e..c989089f1 100644 --- a/src/integration-test/groovy/au/org/ala/ecodata/OrganisationControllerSpec.groovy +++ b/src/integration-test/groovy/au/org/ala/ecodata/OrganisationControllerSpec.groovy @@ -75,8 +75,6 @@ class OrganisationControllerSpec extends IntegrationTestHelper { savedOrganisation.organisationId == organisationId savedOrganisation.name == org.name savedOrganisation.description == org.description - // savedOrganisation.dynamicProperty == org.dynamicProperty (dynamic properties not working in tests) - savedOrganisation.collectoryInstitutionId == institutionId and: "the user who created the organisation is an admin of the new organisation" def orgPermissions = UserPermission.findAllByEntityIdAndEntityType(savedOrganisation.organisationId, Organisation.class.name) @@ -119,14 +117,14 @@ class OrganisationControllerSpec extends IntegrationTestHelper { } - void "projects can be associated with an organisation by the serviceProviderOrganisationId property"() { + void "projects can be associated with an organisation by the associatedOrgs property"() { setup: // Create some data for the database. 
def organisation = TestDataHelper.buildOrganisation([name: 'org 1']) def projects = [] (1..2).each { - projects << TestDataHelper.buildProject([orgIdSvcProvider: organisation.organisationId, name:'svc project '+it]) + projects << TestDataHelper.buildProject([associatedOrgs: [[organisationId:organisation.organisationId, name:'org project '+it]]]) } projects << TestDataHelper.buildProject([organisationId: organisation.organisationId, name:'org project']) (1..3).each { diff --git a/src/integration-test/groovy/au/org/ala/ecodata/OrganisationServiceIntegrationSpec.groovy b/src/integration-test/groovy/au/org/ala/ecodata/OrganisationServiceIntegrationSpec.groovy index 4e7688fcd..14d565659 100644 --- a/src/integration-test/groovy/au/org/ala/ecodata/OrganisationServiceIntegrationSpec.groovy +++ b/src/integration-test/groovy/au/org/ala/ecodata/OrganisationServiceIntegrationSpec.groovy @@ -41,7 +41,7 @@ class OrganisationServiceIntegrationSpec extends IntegrationTestHelper { // setupPost(organisationController.request, org) when: "creating an organisation" - def result = organisationService.create(org) + def result = organisationService.create(org, true) then: "ensure we get a response including an organisationId" // def resp = extractJson(organisationController.response.text) @@ -55,18 +55,17 @@ class OrganisationServiceIntegrationSpec extends IntegrationTestHelper { Organisation.withTransaction { savedOrganisation = organisationService.get(organisationId) } - //organisationController.response.reset() - // organisationController.get(organisationId) - - // def savedOrganisation = extractJson(organisationController.response.text) then: "ensure the properties are the same as the original" savedOrganisation.organisationId == organisationId savedOrganisation.name == org.name savedOrganisation.description == org.description - savedOrganisation.dynamicProperty == org.dynamicProperty savedOrganisation.collectoryInstitutionId == institutionId + and: "The OrganisationService no longer 
supports dynamic properties" + savedOrganisation.dynamicProperty == null + + and: "the user who created the organisation is an admin of the new organisation" def orgPermissions = UserPermission.findAllByEntityIdAndEntityType(savedOrganisation.organisationId, Organisation.class.name) orgPermissions.size() == 1 @@ -112,7 +111,7 @@ class OrganisationServiceIntegrationSpec extends IntegrationTestHelper { } - void "projects can be associated with an organisation by the serviceProviderOrganisationId property"() { + void "projects can be associated with an organisation by the associatedOrgs property"() { setup: def organisation @@ -122,7 +121,7 @@ class OrganisationServiceIntegrationSpec extends IntegrationTestHelper { organisation = TestDataHelper.buildOrganisation([name: 'Test Organisation2']) def projects = [] (1..2).each { - projects << TestDataHelper.buildProject([orgIdSvcProvider: organisation.organisationId]) + projects << TestDataHelper.buildProject([associatedOrgs: [[organisationId:organisation.organisationId, name:'org project '+it]]]) } projects << TestDataHelper.buildProject([organisationId: organisation.organisationId]) (1..3).each { diff --git a/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy b/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy new file mode 100644 index 000000000..1e17d86a7 --- /dev/null +++ b/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy @@ -0,0 +1,74 @@ +package au.org.ala.ecodata + +import grails.testing.mixin.integration.Integration +import grails.util.GrailsWebMockUtil +import groovy.json.JsonSlurper +import org.apache.http.HttpStatus +import org.grails.plugins.testing.GrailsMockHttpServletRequest +import org.grails.plugins.testing.GrailsMockHttpServletResponse +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.mock.web.MockMultipartFile +import 
org.springframework.web.context.WebApplicationContext +import spock.lang.Specification + +@Integration +class SpatialControllerIntegrationSpec extends Specification { + + @Autowired + SpatialController spatialController + + @Autowired + WebApplicationContext ctx + + def setup() { + setRequestResponse() + } + + def cleanup() { + } + + def setRequestResponse() { + GrailsMockHttpServletRequest grailsMockHttpServletRequest = new GrailsMockHttpServletRequest() + GrailsMockHttpServletResponse grailsMockHttpServletResponse = new GrailsMockHttpServletResponse() + GrailsWebMockUtil.bindMockWebRequest(ctx, grailsMockHttpServletRequest, grailsMockHttpServletResponse) + } + + void "test uploadShapeFile with resource zip file"() { + given: + // Read the zip file from resources + def zipFileResourceStream = spatialController.class.getResourceAsStream("/projectExtent.zip") + byte[] zipFileBytes = zipFileResourceStream.bytes + + // Mock the request + MockMultipartFile mockMultipartFile = new MockMultipartFile("file", "projectExtent.zip", "application/zip", zipFileBytes) + spatialController.request.addFile(mockMultipartFile) + spatialController.request.method = 'POST' + + when: + // Call the method + spatialController.uploadShapeFile() + + then: + // Verify the response + spatialController.response.status == HttpStatus.SC_OK + println spatialController.response.contentAsString + def responseContent = new JsonSlurper().parseText(spatialController.response.contentAsString) + responseContent.shp_id != null + responseContent["0"].siteId == "340cfe6a-f230-4bb9-a034-23e9bff125c7" + responseContent["0"].name == "Project area for Southern Tablelands Koala Habitat Restoration Project" + + when: + setRequestResponse() + spatialController.request.method = 'GET' + spatialController.params.shapeFileId = responseContent.shp_id + spatialController.params.featureId = "0" + spatialController.getShapeFileFeatureGeoJson() + + then: + spatialController.response.status == HttpStatus.SC_OK + println 
spatialController.response.contentAsString + def responseJSON = new JsonSlurper().parseText(spatialController.response.contentAsString) + responseJSON.geoJson != null + responseJSON.geoJson.type == "MultiPolygon" + } +} \ No newline at end of file diff --git a/src/integration-test/resources/projectExtent.zip b/src/integration-test/resources/projectExtent.zip new file mode 100644 index 000000000..52e846f76 Binary files /dev/null and b/src/integration-test/resources/projectExtent.zip differ diff --git a/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy b/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy index bda514e80..1bb298f82 100644 --- a/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy +++ b/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy @@ -247,11 +247,15 @@ class GeometryUtils { new GeometryJSON().read(json) } - static Map geometryToGeoJsonMap(Geometry input) { - ByteArrayOutputStream byteOut = new ByteArrayOutputStream() - new GeometryJSON().write(input, new OutputStreamWriter(byteOut, 'UTF-8')) + static Map geometryToGeoJsonMap(Geometry input, int decimals = 4) { + String geoJson = geometryToGeoJsonString(input, decimals) + JSON.parse(geoJson) + } - JSON.parse(byteOut.toString('UTF-8')) + static String geometryToGeoJsonString(Geometry input, int decimals = 4) { + ByteArrayOutputStream byteOut = new ByteArrayOutputStream() + new GeometryJSON(decimals).write(input, new OutputStreamWriter(byteOut, 'UTF-8')) + byteOut.toString('UTF-8') } /** @@ -261,13 +265,15 @@ class GeometryUtils { * @return */ static Map simplify(Map geoJson, double tolerance) { - Geometry input = geoJsonMapToGeometry(geoJson) - - Geometry result = TopologyPreservingSimplifier.simplify(input, tolerance) + Geometry result = simplifyGeometry(input, tolerance) geometryToGeoJsonMap(result) } + static Geometry simplifyGeometry(Geometry input, double tolerance) { + TopologyPreservingSimplifier.simplify(input, tolerance) + } + /** * Iterates through the supplied features 
and determines which features are neighbours using an diff --git a/src/main/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporter.groovy b/src/main/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporter.groovy index 0c2b060b4..6054449ba 100644 --- a/src/main/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporter.groovy +++ b/src/main/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporter.groovy @@ -1,21 +1,11 @@ package au.org.ala.ecodata.reporting -import au.org.ala.ecodata.ActivityForm -import au.org.ala.ecodata.DataDescription -import au.org.ala.ecodata.ExternalId -import au.org.ala.ecodata.ManagementUnit -import au.org.ala.ecodata.ManagementUnitService -import au.org.ala.ecodata.ProjectService -import au.org.ala.ecodata.Organisation -import au.org.ala.ecodata.OrganisationService -import au.org.ala.ecodata.Program -import au.org.ala.ecodata.ProgramService +import au.org.ala.ecodata.* import au.org.ala.ecodata.metadata.OutputModelProcessor import grails.util.Holders import org.apache.commons.logging.Log import org.apache.commons.logging.LogFactory import pl.touk.excel.export.multisheet.AdditionalSheet - /** * Exports project, site, activity and output data to a Excel spreadsheet. 
*/ @@ -38,11 +28,11 @@ class ProjectXlsExporter extends ProjectExporter { List configurableIntersectionHeaders = getIntersectionHeaders() List configurableIntersectionProperties = getIntersectionProperties() - List commonProjectHeadersWithoutSites = ['Project ID', 'Grant ID', 'External ID', 'Internal order number', 'Work order id', 'Organisation', 'Service Provider', 'Management Unit', 'Name', 'Description', 'Program', 'Sub-program', 'Start Date', 'End Date', 'Contracted Start Date', 'Contracted End Date', 'Funding', 'Funding Type', 'Status', "Last Modified"] + configurableIntersectionHeaders - List commonProjectPropertiesRaw = ['grantId', 'externalId', 'internalOrderId', 'workOrderId', 'organisationName', 'serviceProviderName', 'managementUnitName', 'name', 'description', 'associatedProgram', 'associatedSubProgram', 'plannedStartDate', 'plannedEndDate', 'contractStartDate', 'contractEndDate', 'funding', 'fundingType', 'status', 'lastUpdated'] + configurableIntersectionProperties + List commonProjectHeadersWithoutSites = ['Project ID', 'Grant ID', 'External ID', 'Internal order number', 'Work order id', 'Contracted recipient name', 'Recipient (ID)', 'Management Unit', 'Name', 'Description', 'Program', 'Sub-program', 'Start Date', 'End Date', 'Contracted Start Date', 'Contracted End Date', 'Funding', 'Funding Type', 'Status', "Last Modified"] + configurableIntersectionHeaders + List commonProjectPropertiesRaw = ['grantId', 'externalId', 'internalOrderId', 'workOrderId', 'organisationName', 'organisationId', 'managementUnitName', 'name', 'description', 'associatedProgram', 'associatedSubProgram', 'plannedStartDate', 'plannedEndDate', 'contractStartDate', 'contractEndDate', 'funding', 'fundingType', 'status', 'lastUpdated'] + configurableIntersectionProperties - List projectHeadersWithTerminationReason = ['Project ID', 'Grant ID', 'External ID', 'Internal order number', 'Work order id', 'Organisation', 'Service Provider', 'Management Unit', 'Name', 'Description', 
'Program', 'Sub-program', 'Start Date', 'End Date', 'Contracted Start Date', 'Contracted End Date', 'Funding', 'Funding Type', 'Status'] + configurableIntersectionHeaders + ['Termination Reason', 'Last Modified'] - List projectPropertiesTerminationReason = ['grantId', 'externalId', 'internalOrderId', 'workOrderId', 'organisationName', 'serviceProviderName', 'managementUnitName', 'name', 'description', 'associatedProgram', 'associatedSubProgram', 'plannedStartDate', 'plannedEndDate', 'contractStartDate', 'contractEndDate', 'funding', 'fundingType', 'status'] + configurableIntersectionProperties + List projectHeadersWithTerminationReason = ['Project ID', 'Grant ID', 'External ID', 'Internal order number', 'Work order id', 'Contracted recipient name', 'Recipient (ID)', 'Management Unit', 'Name', 'Description', 'Program', 'Sub-program', 'Start Date', 'End Date', 'Contracted Start Date', 'Contracted End Date', 'Funding', 'Funding Type', 'Status'] + configurableIntersectionHeaders + ['Termination Reason', 'Last Modified'] + List projectPropertiesTerminationReason = ['grantId', 'externalId', 'internalOrderId', 'workOrderId', 'organisationName', 'organisationId', 'managementUnitName', 'name', 'description', 'associatedProgram', 'associatedSubProgram', 'plannedStartDate', 'plannedEndDate', 'contractStartDate', 'contractEndDate', 'funding', 'fundingType', 'status'] + configurableIntersectionProperties List projectPropertiesWithTerminationReason = ['projectId'] + projectPropertiesTerminationReason.collect{PROJECT_DATA_PREFIX+it} + ["terminationReason", PROJECT_DATA_PREFIX+"lastUpdated"] @@ -55,8 +45,16 @@ class ProjectXlsExporter extends ProjectExporter { List commonProjectHeaders = commonProjectHeadersWithoutSites + stateHeaders + electorateHeaders + projectApprovalHeaders List commonProjectProperties = commonProjectPropertiesWithoutSites + stateProperties + electorateProperties + projectApprovalProperties - List projectHeaders = projectHeadersWithTerminationReason + 
projectStateHeaders - List projectProperties = projectPropertiesWithTerminationReason + projectStateProperties + List associatedOrgProjectHeaders = (1..3).collect{['Contracted recipient name '+it, 'Organisation ID '+it, 'Organisation relationship from date '+it, 'Organisation relationship to date '+it, 'Organisation relationship '+it]}.flatten() + List associatedOrgProperties = ['name', 'organisationId', 'fromDate', 'toDate', 'description'] + + List associatedOrgProjectProperties = (1..3).collect{['associatedOrg_name'+it, 'associatedOrg_organisationId'+it, 'associatedOrg_fromDate'+it, 'associatedOrg_toDate'+it, 'associatedOrg_description'+it]}.flatten() + + List organisationDetailsHeaders = ['Project ID', 'Grant ID', 'External ID', 'Program', 'Sub-program', 'Management Unit', 'Project Name', 'Project start date', 'Project end date', 'Contracted recipient name', 'Organisation ID', 'Organisation relationship from date', 'Organisation relationship to date', 'Organisation relationship', 'ABN', 'MERIT organisation name'] + List organisationDetailsProperties = ['projectId', 'project_grantId', 'project_externalId', 'project_associatedProgram', 'project_associatedSubProgram', 'project_managementUnitName', 'project_name', 'project_plannedStartDate', 'project_plannedEndDate', 'name', 'organisationId', 'fromDate', 'toDate', 'description', 'abn', 'organisationName'] + + List projectHeaders = projectHeadersWithTerminationReason + associatedOrgProjectHeaders + projectStateHeaders + List projectProperties = projectPropertiesWithTerminationReason + associatedOrgProjectProperties + projectStateProperties List siteStateHeaders = (1..5).collect{'State '+it} @@ -132,8 +130,8 @@ class ProjectXlsExporter extends ProjectExporter { List rdpProjectDetailsHeaders=commonProjectHeaders + ["Does this project directly support a priority place?","Supported priority places", "Are First Nations people (Indigenous) involved in the project?", "What is the nature of the involvement?","Project 
delivery assumptions","Project review, evaluation and improvement methodology"] List rdpProjectDetailsProperties =commonProjectProperties + ["supportsPriorityPlace", "supportedPriorityPlaces", "indigenousInvolved", "indigenousInvolvementType", "projectMethodology", "projectREI"] - List datasetHeader = commonProjectHeaders + ["Dataset Title", "What program outcome does this dataset relate to?", "What primary or secondary investment priorities or assets does this dataset relate to?","Other Investment Priority","Which project service and outcome/s does this data set support?","Is this data being collected for reporting against short or medium term outcome statements?", "Is this (a) a baseline dataset associated with a project outcome i.e. against which, change will be measured, (b) a project progress dataset that is tracking change against an established project baseline dataset or (c) a standalone, foundational dataset to inform future management interventions?","Other Dataset Type","Which project baseline does this data set relate to or describe?","What EMSA protocol was used when collecting the data?", "What types of measurements or observations does the dataset include?","Other Measurement Type","Identify the method(s) used to collect the data", "Describe the method used to collect the data in detail", "Identify any apps used during data collection", "Provide a coordinate centroid for the area surveyed", "First collection date", "Last collection date", "Is this data an addition to existing time-series data collected as part of a previous project, or is being collected as part of a broader/national dataset?", "Has your data been included in the Threatened Species Index?","Date of upload", "Who developed/collated the dataset?", "Has a quality assurance check been undertaken on the data?", "Has the data contributed to a publication?", "Where is the data held?", "For all public datasets, please provide the published location. 
If stored internally by your organisation, write ‘stored internally'", "What format is the dataset?","What is the size of the dataset (KB)?","Unknown size", "Are there any sensitivities in the dataset?", "Primary source of data (organisation or individual that owns or maintains the dataset)", "Dataset custodian (name of contact to obtain access to dataset)", "Progress", "Is Data Collection Ongoing"] - List datasetProperties = commonProjectProperties + ["name", "programOutcome", "investmentPriorities","otherInvestmentPriority","projectOutcomes", "term", "type", "otherDataSetType","baselines", "protocol", "measurementTypes","otherMeasurementType", "methods", "methodDescription", "collectionApp", "location", "startDate", "endDate", "addition", "threatenedSpeciesIndex","threatenedSpeciesIndexUploadDate", "collectorType", "qa", "published", "storageType", "publicationUrl", "format","sizeInKB","sizeUnknown", "sensitivities", "owner", "custodian", "progress", "dataCollectionOngoing"] + List datasetHeader = commonProjectHeaders + ["Dataset Title", "What program outcome does this dataset relate to?", "What primary or secondary investment priorities or assets does this dataset relate to?","Other Investment Priority","Which project service and outcome/s does this data set support?","Is this data being collected for reporting against short or medium term outcome statements?", "Is this (a) a baseline dataset associated with a project outcome i.e. 
against which, change will be measured, (b) a project progress dataset that is tracking change against an established project baseline dataset or (c) a standalone, foundational dataset to inform future management interventions?","Other Dataset Type","Which project baseline does this data set relate to or describe?","What EMSA protocol was used when collecting the data?", "What types of measurements or observations does the dataset include?","Other Measurement Type","Identify the method(s) used to collect the data", "Describe the method used to collect the data in detail", "Identify any apps used during data collection", "Provide a coordinate centroid for the area surveyed", "First collection date", "Last collection date", "Is this data an addition to existing time-series data collected as part of a previous project, or is being collected as part of a broader/national dataset?", "Has your data been included in the Threatened Species Index?","Date of upload", "Who developed/collated the dataset?", "Has a quality assurance check been undertaken on the data?", "Has the data contributed to a publication?", "Where is the data held?", "For all public datasets, please provide the published location. 
If stored internally by your organisation, write ‘stored internally'", "What format is the dataset?","What is the size of the dataset (KB)?","Unknown size", "Are there any sensitivities in the dataset?", "Primary source of data (organisation or individual that owns or maintains the dataset)", "Dataset custodian (name of contact to obtain access to dataset)", "Progress", "Is Data Collection Ongoing", "Technical data from Monitor"] + List datasetProperties = commonProjectProperties + ["name", "programOutcome", "investmentPriorities","otherInvestmentPriority","projectOutcomes", "term", "type", "otherDataSetType","baselines", "protocol", "measurementTypes","otherMeasurementType", "methods", "methodDescription", "collectionApp", "location", "startDate", "endDate", "addition", "threatenedSpeciesIndex","threatenedSpeciesIndexUploadDate", "collectorType", "qa", "published", "storageType", "publicationUrl", "format","sizeInKB","sizeUnknown", "sensitivities", "owner", "custodian", "progress", "dataCollectionOngoing", "orgMintedIdentifier"] List electorateInternalOrderNoHeader = (2..3).collect{'Internal order number '+it} List electorateInternalOrderNoProperties = (1..2).collect{PROJECT_DATA_PREFIX+'internalOrderId'+it} @@ -164,8 +162,10 @@ class ProjectXlsExporter extends ProjectExporter { List rdpMonitoringIndicatorsHeaders =commonProjectHeaders + ['Code', 'Monitoring methodology', 'Project service / Target measure/s', 'Monitoring method', 'Evidence to be retained'] List rdpMonitoringIndicatorsProperties =commonProjectProperties + ['relatedBaseline', 'data1', 'relatedTargetMeasures','protocols', 'evidence'] + OutputModelProcessor processor = new OutputModelProcessor() ProjectService projectService + OrganisationService organisationService /** Enables us to pre-create headers for each electorate that will appear in the result set */ List distinctElectorates @@ -185,6 +185,7 @@ class ProjectXlsExporter extends ProjectExporter { ProjectXlsExporter(ProjectService 
projectService, XlsExporter exporter, ManagementUnitService managementUnitService, OrganisationService organisationService, ProgramService programService) { super(exporter) this.projectService = projectService + this.organisationService = organisationService distinctElectorates = new ArrayList() useSpeciesUrlGetter = true setupManagementUnits(managementUnitService) @@ -195,6 +196,7 @@ class ProjectXlsExporter extends ProjectExporter { ProjectXlsExporter(ProjectService projectService, XlsExporter exporter, List tabsToExport, List electorates, ManagementUnitService managementUnitService, OrganisationService organisationService, ProgramService programService, Map downloadMetadata, boolean formSectionPerTab = false) { super(exporter, tabsToExport, [:], TimeZone.default) this.projectService = projectService + this.organisationService = organisationService this.formSectionPerTab = formSectionPerTab useSpeciesUrlGetter = true addDataDescriptionToDownload(downloadMetadata) @@ -275,12 +277,12 @@ class ProjectXlsExporter extends ProjectExporter { private static String getHeaderNameForFacet (String facetName, String prefix = "Primary") { Map names = Holders.config.getProperty("app.facets.displayNames", Map) - String name = names[facetName] + String name = names[facetName]['headerName'] return "$prefix $name (Interpreted)" } private static String getPropertyNameForFacet (String facetName, String prefix = "primary") { - return "interpreted_$prefix$facetName" + return "$prefix$facetName" } void export(Map project) { @@ -289,6 +291,7 @@ class ProjectXlsExporter extends ProjectExporter { addProjectGeo(project) exportProject(project) + exportProjectOrganisationData(project) exportOutputTargets(project) exportSites(project) exportDocuments(project) @@ -320,39 +323,21 @@ class ProjectXlsExporter extends ProjectExporter { if (project.managementUnitId) { project[PROJECT_DATA_PREFIX+'managementUnitName'] = managementUnitNames[project.managementUnitId] } + + Date now = new Date() + List 
orgs = project.associatedOrgs?.findAll{(!it.fromDate || it.fromDate <= now) && (!it.toDate || it.toDate >= now)} + if (orgs) { + project.organisationName = orgs[0].name + project.organisationId = orgs[0].organisationId + } + filterExternalIds(project, PROJECT_DATA_PREFIX) } private void addPrimaryAndOtherIntersections (Map project) { - Map intersections = projectService.orderLayerIntersectionsByAreaOfProjectSites(project) - Map config = metadataService.getGeographicConfig() - List intersectionLayers = config.checkForBoundaryIntersectionInLayers - intersectionLayers?.each { layer -> - Map facetName = metadataService.getGeographicFacetConfig(layer) - if (facetName.name) { - List intersectionValues = intersections[layer] - if (intersectionValues) { - project[getPropertyNameForFacet(facetName.name)] = intersectionValues.pop() - project[getPropertyNameForFacet(facetName.name,"other")] = intersectionValues.join("; ") - } - } - else - log.error ("No facet config found for layer $layer.") - } - - if (project.geographicInfo) { - // load from manually assigned electorates/states - if (!project.containsKey(getPropertyNameForFacet("elect"))) { - project[getPropertyNameForFacet("elect")] = project.geographicInfo.primaryElectorate - project[getPropertyNameForFacet("elect","other")] = project.geographicInfo.otherElectorates?.join("; ") - } - - if (!project.containsKey(getPropertyNameForFacet("state"))) { - project[getPropertyNameForFacet("state")] = project.geographicInfo.primaryState - project[getPropertyNameForFacet("state","other")] = project.geographicInfo.otherStates?.join("; ") - } - } + Map result = projectService.findStateAndElectorateForProject(project) ?: [:] + project << result } private addProjectGeo(Map project) { @@ -378,6 +363,8 @@ class ProjectXlsExporter extends ProjectExporter { } } + + void exportActivities(Map project) { tabsToExport.each { tab -> List activities = project?.activities?.findAll { it.type == tab } @@ -432,6 +419,31 @@ class ProjectXlsExporter 
extends ProjectExporter { } } + void exportProjectOrganisationData(Map project) { + String sheetName = 'Organisation Details' + if (shouldExport(sheetName)) { + AdditionalSheet sitesSheet = getSheet(sheetName, organisationDetailsProperties, organisationDetailsHeaders) + List associatedOrgs = [] + + project.associatedOrgs?.each { org -> + Map orgProps = org+project + if (org.organisationId) { + Map organisation = organisationService.get(org.organisationId) + orgProps['abn'] = organisation?.abn + orgProps['organisationName'] = organisation?.name + } + else { + orgProps['organisationName'] = '' + } + + associatedOrgs << orgProps + + } + int row = sitesSheet.getSheet().lastRowNum + sitesSheet.add(associatedOrgs, organisationDetailsProperties, row + 1) + } + } + private void exportSites(Map project) { String sheetName = 'Sites' if (shouldExport(sheetName)) { @@ -499,6 +511,13 @@ class ProjectXlsExporter extends ProjectExporter { project[electorate] = projectElectorates.contains(electorate) ? 
'Y' : 'N' } + project.associatedOrgs?.eachWithIndex { org, i -> + Map orgProps = associatedOrgProperties.collectEntries{ + [('associatedOrg_'+it+(i+1)):org[it]] + } + project.putAll(orgProps) + } + projectSheet.add([project], properties, row + 1) } diff --git a/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy new file mode 100644 index 000000000..697c8e4b1 --- /dev/null +++ b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy @@ -0,0 +1,112 @@ +package au.org.ala.ecodata.spatial + +import com.google.common.io.Files +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.tuple.Pair +import org.geotools.data.FileDataStore +import org.geotools.data.FileDataStoreFinder +import org.geotools.data.simple.SimpleFeatureCollection +import org.geotools.data.simple.SimpleFeatureIterator +import org.geotools.data.simple.SimpleFeatureSource +import org.opengis.feature.Property +import org.opengis.feature.simple.SimpleFeature +import org.opengis.feature.type.GeometryType + +import java.util.zip.ZipEntry +import java.util.zip.ZipFile +/** + * Utilities for converting spatial data between formats + * + * @author ChrisF + */ +@Slf4j +@CompileStatic +class SpatialConversionUtils { + static Pair extractZippedShapeFile(File zippedShpFile) throws IOException { + + File tempDir = Files.createTempDir() + + // Unpack the zipped shape file into the temp directory + ZipFile zf = null + File shpFile = null + try { + zf = new ZipFile(zippedShpFile) + + boolean shpPresent = false + boolean shxPresent = false + boolean dbfPresent = false + + Enumeration entries = zf.entries() + + while (entries.hasMoreElements()) { + ZipEntry entry = entries.nextElement() + InputStream inStream = zf.getInputStream(entry) + File f = new File(tempDir, entry.getName()) + if (!f.getName().startsWith(".")) { + 
if (entry.isDirectory()) { + f.mkdirs() + } else { + FileOutputStream outStream = new FileOutputStream(f) + IOUtils.copy(inStream, outStream) + + if (entry.getName().endsWith(".shp")) { + shpPresent = true + shpFile = f + } else if (entry.getName().endsWith(".shx") && !f.getName().startsWith("/")) { + shxPresent = true + } else if (entry.getName().endsWith(".dbf") && !f.getName().startsWith("/")) { + dbfPresent = true + } + } + } + } + + if (!shpPresent || !shxPresent || !dbfPresent) { + throw new IllegalArgumentException("Invalid archive. Must contain .shp, .shx and .dbf at a minimum.") + } + } catch (Exception e) { + log.error(e.getMessage(), e) + } finally { + if (zf != null) { + try { + zf.close() + } catch (Exception e) { + log.error(e.getMessage(), e) + } + } + } + + if (shpFile == null) { + return null + } else { + return Pair.of(shpFile.getParentFile().getName(), shpFile) + } + } + + static List>> getShapeFileManifest(File shpFile) throws IOException { + List>> manifestData = new ArrayList>>() + + FileDataStore store = FileDataStoreFinder.getDataStore(shpFile) + + SimpleFeatureSource featureSource = store.getFeatureSource(store.getTypeNames()[0]) + SimpleFeatureCollection featureCollection = featureSource.getFeatures() + SimpleFeatureIterator it = featureCollection.features() + + while (it.hasNext()) { + SimpleFeature feature = it.next() + List> pairList = new ArrayList>() + for (Property prop : feature.getProperties()) { + if (!(prop.getType() instanceof GeometryType)) { + Pair pair = Pair.of(prop.getName().toString(), feature.getAttribute(prop.getName())) + pairList.add(pair) + } + } + manifestData.add(pairList) + } + + return manifestData + } +} + diff --git a/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy new file mode 100755 index 000000000..948b2290a --- /dev/null +++ b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy @@ -0,0 +1,127 @@ +/* + * To change this 
template, choose Tools | Templates + * and open the template in the editor. + */ +package au.org.ala.ecodata.spatial + + +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import org.geotools.data.FileDataStore +import org.geotools.data.FileDataStoreFinder +import org.geotools.data.simple.SimpleFeatureCollection +import org.geotools.data.simple.SimpleFeatureIterator +import org.geotools.data.simple.SimpleFeatureSource +import org.geotools.geometry.jts.JTS +import org.geotools.geometry.jts.JTSFactoryFinder +import org.geotools.referencing.CRS +import org.geotools.referencing.crs.DefaultGeographicCRS +import org.locationtech.jts.geom.Geometry +import org.locationtech.jts.geom.GeometryCollection +import org.locationtech.jts.geom.GeometryFactory +import org.opengis.feature.simple.SimpleFeature +import org.opengis.referencing.crs.CoordinateReferenceSystem + +@CompileStatic +@Slf4j +class SpatialUtils { + static Geometry getShapeFileFeaturesAsGeometry(File shpFileDir, String featureIndexes) throws IOException { + + if (!shpFileDir.exists() || !shpFileDir.isDirectory()) { + throw new IllegalArgumentException("Supplied directory does not exist or is not a directory") + } + + List geometries = new ArrayList() + FileDataStore store = null + SimpleFeatureIterator it = null + + try { + + File shpFile = null + for (File f : shpFileDir.listFiles()) { + if (f.getName().endsWith(".shp")) { + shpFile = f + break + } + } + + if (shpFile == null) { + throw new IllegalArgumentException("No .shp file present in directory") + } + + store = FileDataStoreFinder.getDataStore(shpFile) + + SimpleFeatureSource featureSource = store.getFeatureSource(store.getTypeNames()[0]) + SimpleFeatureCollection featureCollection = featureSource.getFeatures() + it = featureCollection.features() + + //transform CRS to the same as the shapefile (at least try) + //default to 4326 + CoordinateReferenceSystem crs = null + try { + crs = store.getSchema().getCoordinateReferenceSystem() + if 
(crs == null) { + //attempt to parse prj + try { + File prjFile = new File(shpFile.getPath().substring(0, shpFile.getPath().length() - 3) + "prj") + if (prjFile.exists()) { + String prj = prjFile.text + + if (prj == "PROJCS[\"WGS_1984_Web_Mercator_Auxiliary_Sphere\",GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]],PROJECTION[\"Mercator_Auxiliary_Sphere\"],PARAMETER[\"False_Easting\",0.0],PARAMETER[\"False_Northing\",0.0],PARAMETER[\"Central_Meridian\",0.0],PARAMETER[\"Standard_Parallel_1\",0.0],PARAMETER[\"Auxiliary_Sphere_Type\",0.0],UNIT[\"Meter\",1.0]]") { + //support for arcgis online default shp exports + crs = CRS.decode("EPSG:3857") + } else { + crs = CRS.parseWKT(prjFile.text) + } + } + } catch (Exception ignored) { + } + + if (crs == null) { + crs = DefaultGeographicCRS.WGS84 + } + } + } catch (Exception ignored) { + } + + int i = 0 + boolean all = "all".equalsIgnoreCase(featureIndexes) + def indexes = [] + if (!all) featureIndexes.split(",").each { indexes.push(it.toInteger()) } + while (it.hasNext()) { + SimpleFeature feature = (SimpleFeature) it.next() + if (all || indexes.contains(i)) { + geometries.add(feature.getDefaultGeometry() as Geometry) + } + i++ + } + + Geometry mergedGeometry + + if (geometries.size() == 1) { + mergedGeometry = geometries.get(0) + } else { + GeometryFactory factory = JTSFactoryFinder.getGeometryFactory(null) + GeometryCollection geometryCollection = (GeometryCollection) factory.buildGeometry(geometries) + + // note the following geometry collection may be invalid (say with overlapping polygons) + mergedGeometry = geometryCollection.union() + } + + try { + return JTS.transform(mergedGeometry, CRS.findMathTransform(crs, DefaultGeographicCRS.WGS84, true)) + } catch (Exception ignored) { + return mergedGeometry + } + } catch (Exception e) { + throw e + } finally { + if (it != null) { + it.close() + } + if (store != null) { + 
store.dispose() + } + } + } +} diff --git a/src/test/groovy/au/org/ala/ecodata/OrganisationServiceSpec.groovy b/src/test/groovy/au/org/ala/ecodata/OrganisationServiceSpec.groovy index 95275f532..aeaabf062 100644 --- a/src/test/groovy/au/org/ala/ecodata/OrganisationServiceSpec.groovy +++ b/src/test/groovy/au/org/ala/ecodata/OrganisationServiceSpec.groovy @@ -42,7 +42,7 @@ class OrganisationServiceSpec extends Specification implements ServiceUnitTest> projects - projectService.search([orgIdSvcProvider: orgId]) >> [] - - - when: - def result - // print (orgId) - // Organisation.withNewTransaction { - result = service.get(orgId) - // print result - // } - // def result = service.toMap(org) - - then: - result.organisationId == orgId - result.name == org.name - result.description == org.description - result.projects == null - - when: - // Organisation.withNewTransaction { - // result = service.get(orgId, [OrganisationService.PROJECTS]) - // print result - // } - def result1 = service.toMap(org, [OrganisationService.PROJECTS]) - - then: - result1.organisationId == orgId - result1.name == org.name - result1.description == org.description - result1.dynamicProperty == org['dynamicProperty'] - result1.projects == projects - - } -*/ - - - - } diff --git a/src/test/groovy/au/org/ala/ecodata/ProjectServiceSpec.groovy b/src/test/groovy/au/org/ala/ecodata/ProjectServiceSpec.groovy index 7924276c7..cc0192f5a 100644 --- a/src/test/groovy/au/org/ala/ecodata/ProjectServiceSpec.groovy +++ b/src/test/groovy/au/org/ala/ecodata/ProjectServiceSpec.groovy @@ -1064,4 +1064,114 @@ class ProjectServiceSpec extends MongoSpec implements ServiceUnitTest> geographicConfig + metadataService.getGeographicConfig(*_) >> geographicConfig + metadataService.getGeographicFacetConfig("layer1") >> [name: "state", grouped: false] + metadataService.getGeographicFacetConfig("layer1", _) >> [name: "state", grouped: false] + metadataService.getGeographicFacetConfig("layer2") >> [name: "elect", grouped: false] + 
metadataService.getGeographicFacetConfig("layer2", _) >> [name: "elect", grouped: false] + + when: + Map result = service.findStateAndElectorateForProject(project) + + then: + result.primarystate == "state1" + result.otherstate == "state2; state3" + result.primaryelect == "electorate2" + result.otherelect == "electorate1" + } + + def "findStateAndElectorateForProject should return default geographic info if isDefault is false and project sites are empty"() { + given: + Map project = [geographicInfo: [isDefault: false, primaryState: "ACT", otherStates: ['NSW', 'VIC'], primaryElectorate: "Bean", otherElectorates: ['Canberra', 'Fenner']]] + Map geographicConfig = [ + contextual: [state: 'layer1', elect: 'layer2'], + checkForBoundaryIntersectionInLayers: ["layer1", "layer2"] + ] + + metadataService.getGeographicConfig(*_) >> geographicConfig + metadataService.getGeographicFacetConfig("layer1") >> [name: "state", grouped: false] + metadataService.getGeographicFacetConfig("layer2") >> [name: "elect", grouped: false] + service.getRepresentativeSitesOfProject(project) >> [] + + + when: + Map result = service.findStateAndElectorateForProject(project) + + then: + result.primarystate == "ACT" + result.otherstate == "NSW; VIC" + result.primaryelect == "Bean" + result.otherelect == "Canberra; Fenner" + } + + + def "findStateAndElectorateForProject should return default geographic info if isDefault is true"() { + given: + Map project = [geographicInfo: [isDefault: true, primaryState: "ACT", otherStates: ['NSW', 'VIC'], primaryElectorate: "Bean", otherElectorates: ['Canberra', 'Fenner']]] + + when: + Map result = service.findStateAndElectorateForProject(project) + + then: + result.primarystate == "ACT" + result.otherstate == "NSW; VIC" + result.primaryelect == "Bean" + result.otherelect == "Canberra; Fenner" + } + + def "findStateAndElectorateForProject should return empty map if project is null"() { + when: + Map project = null + Map result = 
service.findStateAndElectorateForProject(project) + + then: + result.isEmpty() + } } diff --git a/src/test/groovy/au/org/ala/ecodata/SearchControllerSpec.groovy b/src/test/groovy/au/org/ala/ecodata/SearchControllerSpec.groovy index 67a08bcc1..8a587509f 100644 --- a/src/test/groovy/au/org/ala/ecodata/SearchControllerSpec.groovy +++ b/src/test/groovy/au/org/ala/ecodata/SearchControllerSpec.groovy @@ -90,7 +90,7 @@ class SearchControllerSpec extends Specification implements ControllerUnitTest> getShape2() 1 * webService.get("/ws/shapes/wkt/456") >> getBoundaryShape() } + + def "titleCase should capitalize the first letter of each word"() { + expect: + service.titleCase("new south wales") == "New South Wales" + service.titleCase("australian capital territory") == "Australian Capital Territory" + service.titleCase("act") == "Act" + service.titleCase("nSw") == "Nsw" + } private Geometry getBoundaryShape() { return GeometryUtils.geoJsonMapToGeometry(mapper.readValue('{' + diff --git a/src/test/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporterSpec.groovy b/src/test/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporterSpec.groovy index b09cf2984..aa64f203a 100644 --- a/src/test/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporterSpec.groovy +++ b/src/test/groovy/au/org/ala/ecodata/reporting/ProjectXlsExporterSpec.groovy @@ -230,7 +230,7 @@ class ProjectXlsExporterSpec extends Specification implements GrailsUnitTest { setup: String sheet = "Electorate Coord" Map project = project() - projectService.orderLayerIntersectionsByAreaOfProjectSites(_) >> ["cl927": ["ACT"], "cl11163": ["bean", "fenner", "canberra"]] + projectService.findStateAndElectorateForProject(_) >> ["primarystate": "ACT", "otherstate": null, "primaryelect": "bean", "otherelect": "fenner; canberra"] when: projectXlsExporter.export(project)