Add CORS and Handle query by WKT (#33)
* adding openapi using boilerplate

* adding openapi using boilerplate

* update index

* remove prefix

* add openapi v3 instead

* use openapi v3

* add conneg mime types

* add params and minor updates to api spec

* update openapi spec as per discussions

* update openapi spec searchGeomByPositionAndDataset

* add /dataset endpoint

* first version of /datasets/ impl

* adding dataset template

* adding backend code to render dataset

* update templates

* update openapi with dataset/item endpoint

* adding proxyfix

* add werkzeug

* fix werkzeug bug

* potential fix for pg too many clients. set to autocommit

* add db conn pool

* adding explicit commit statement

* wrap connections in a try-finally block

* revising mapping from contractedcatchment

* handle dbconn if down

* update layout to point to the single layout template

* if no_items none, then use 0

* update geom view page to use single layout

* alternates to use single layout

* adding first code for search by wkt

* first cut at find by wkt intersect

* adding find by wkt and gis op

* fix CORS and use POST body for /wkt

* splitting out mappings to a separate file. load in via read file
jyucsiro authored Dec 15, 2020
1 parent ee2cd4c commit 3dc2c41
Showing 7 changed files with 138 additions and 36 deletions.
7 changes: 4 additions & 3 deletions api/app.py
@@ -9,12 +9,13 @@


app = Flask(__name__, template_folder=conf.TEMPLATES_DIR, static_folder=conf.STATIC_DIR)
CORS(app)
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_host=1)


CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(pages.pages)
app.register_blueprint(classes.classes)
app.config['CORS_HEADERS'] = 'Content-Type'



### swagger specific ###
SWAGGER_URL = '/api/doc'
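With CORS(app, resources={r"/*": {"origins": "*"}}) registered, flask-cors should attach an Access-Control-Allow-Origin header to responses on every route. A minimal client-side check, sketched under the assumption that the API is reachable at a hypothetical http://localhost:5000:

import requests

BASE_URL = "http://localhost:5000"  # hypothetical; substitute the deployed host

# Simulate a cross-origin request by sending an Origin header to the Swagger UI route.
resp = requests.get(BASE_URL + "/api/doc", headers={"Origin": "http://example.org"})

# With origins set to "*", flask-cors is expected to answer with a wildcard CORS header.
print(resp.status_code)
print(resp.headers.get("Access-Control-Allow-Origin"))  # expected: *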
89 changes: 88 additions & 1 deletion api/controller/classes.py
@@ -21,6 +21,7 @@

classes = Blueprint('classes', __name__)

dataset_mappings = DatasetMappings()

dogs = [
{
@@ -108,6 +109,32 @@ def search_by_latlng_and_dataset(latlng, dataset):
renderer = SearchResultsRenderer(request, request.base_url, list_results, 'page_searchresults.html')
return renderer.render()

@classes.route('/search/wkt', methods = ['HEAD', 'POST', 'GET'])
def search_by_wkt():
if request.method == 'POST':
crs = 4326
if 'crs' in request.args:
crs = request.args.get('crs','4326')
print(request.args)
request_json = request.get_json()
wkt = request_json.get('wkt')
dataset = request_json.get('dataset')
operation = request_json.get('operation')
#wkt = request.form.get('wkt')
#dataset = request.form.get('dataset')
#operation = request.form.get('operation')
if operation is None:
list_results = find_geometry_by_wkt(wkt, crs=crs, dataset=dataset)
else:
list_results = find_geometry_by_wkt(wkt, crs=crs, dataset=dataset, operation=operation)
if list_results is None:
return Response("Not Found", status=404)
renderer = SearchResultsRenderer(request, request.base_url, list_results, 'page_searchresults.html')
return renderer.render()
elif request.method == 'GET':
return "Use POST"



geom_list = [
{
@@ -257,7 +284,67 @@ def find_geometry_by_latlng(latlng, dataset=None, crs='4326'):
r_obj['id'] = r[0]
r_obj['dataset'] = r[1]
r_obj['geometry'] = request.host_url + "geometry/{dataset}/{id}".format(dataset=r_obj['dataset'],id=r_obj['id'])
r_obj['feature'] = DatasetMappings.find_resource_uri(r_obj['dataset'],r_obj['id'])
r_obj['feature'] = dataset_mappings.find_resource_uri(r_obj['dataset'],r_obj['id'])
fmt_results.append(r_obj)
r_obj = { 'count': len(fmt_results), 'res': fmt_results }
except Exception as e:
print(e)
conn.rollback()
cur.close()
conn.commit()
r_obj = { 'count': -1, 'res': [], 'errcode': 2}
finally:
dbpool.putconn(conn)
return { 'count': len(fmt_results), 'res': fmt_results }

def find_geometry_by_wkt(wkt, dataset=None, crs='4326', operation='intersects'):
"""
Assumes there is a Postgis database with connection config specified in system environment variables.
Also assumes there is a table/view called 'combined_geoms' with structure (id, dataset, geom).
This function connects to the DB, and queries for matching geoms based on input wkt.
Default CRS is WGS84 (4326)
"""
global dbpool
if wkt is None:
return { 'count': -1, 'res': None, 'errcode': 1}
postgis_op = None
if operation == 'intersects':
postgis_op = "ST_Intersects"
elif operation == 'contains':
postgis_op = "ST_Contains"
elif operation == 'overlaps':
postgis_op = "ST_Overlaps"
else:
return { 'count': -1, 'res': None, 'errcode': 2}
query_list = []
#query 1: no dataset specified so query all
query_list.append('SELECT id, dataset FROM combined_geoms WHERE {} ( ST_Transform(ST_GeomFromText(%s, %s),3577) , geom);'.format(postgis_op))
#query 2: dataset _is_ specified so query by dataset
query_list.append('SELECT id, dataset FROM combined_geoms WHERE dataset = %s and {}( ST_Transform(ST_GeomFromText(%s, %s),3577) , geom);'.format(postgis_op))
fmt_results = []
r_obj = {}
try:
if dbpool is None:
dbpool = establish_dbpool()
conn = dbpool.getconn()
conn.set_session(readonly=True, autocommit=True)
cur = conn.cursor()
if dataset is None:
cur.execute(query_list[0], (str(wkt), str(crs)))
else:
cur.execute(query_list[1], (str(dataset), str(wkt), str(crs)))
results = cur.fetchall()
cur.close()
conn.commit()
if results is None:
r_obj = { 'count': -1, 'res': []}
else:
for r in results:
r_obj = {}
r_obj['id'] = r[0]
r_obj['dataset'] = r[1]
r_obj['geometry'] = request.host_url + "geometry/{dataset}/{id}".format(dataset=r_obj['dataset'],id=r_obj['id'])
r_obj['feature'] = dataset_mappings.find_resource_uri(r_obj['dataset'],r_obj['id'])
fmt_results.append(r_obj)
r_obj = { 'count': len(fmt_results), 'res': fmt_results }
except Exception as e:
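The new /search/wkt route reads wkt, dataset and operation from a JSON POST body, with crs as an optional query parameter (defaulting to 4326). A rough usage sketch, assuming a hypothetical deployment at http://localhost:5000 and an illustrative polygon and dataset key; the exact response representation depends on SearchResultsRenderer's content negotiation:

import requests

BASE_URL = "http://localhost:5000"  # hypothetical deployment URL

payload = {
    # Illustrative WGS84 polygon; any WKT geometry string can be supplied.
    "wkt": "POLYGON((149.0 -35.4, 149.3 -35.4, 149.3 -35.1, 149.0 -35.1, 149.0 -35.4))",
    "dataset": "asgs16_sa2",       # omit to search across all datasets
    "operation": "intersects",     # 'contains' and 'overlaps' are also recognised
}

resp = requests.post(
    BASE_URL + "/search/wkt",
    json=payload,
    params={"crs": "4326"},                 # optional; 4326 is the default
    headers={"Accept": "application/json"},
)
print(resp.status_code)
print(resp.text)  # rendered by SearchResultsRenderer; a count plus id/dataset/geometry/feature entries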
24 changes: 24 additions & 0 deletions api/model/default_mappings.json
@@ -0,0 +1,24 @@
{
"asgs16_sa1": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel1",
"asgs16_sa2": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel2",
"asgs16_sa3": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel3",
"asgs16_sa4": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel4",
"asgs16_mb": "http://linked.data.gov.au/dataset/asgs2016/meshblock",
"asgs16_ste": "http://linked.data.gov.au/dataset/asgs2016/stateorterritory",
"asgs16_sua": "http://linked.data.gov.au/dataset/asgs2016/significanturbanarea",
"asgs16_ireg": "http://linked.data.gov.au/dataset/asgs2016/indigenousregion",
"asgs16_iloc": "http://linked.data.gov.au/dataset/asgs2016/indigenouslocation",
"asgs16_iare": "http://linked.data.gov.au/dataset/asgs2016/indigenousarea",
"asgs16_ra": "http://linked.data.gov.au/dataset/asgs2016/remotenessarea",
"asgs16_gccsa": "http://linked.data.gov.au/dataset/asgs2016/greatercapitalcitystatisticalarea",
"asgs16_ucl": "http://linked.data.gov.au/dataset/asgs2016/urbancentreandlocality",
"asgs16_sosr": "http://linked.data.gov.au/dataset/asgs2016/sectionofstaterange",
"asgs16_sos": "http://linked.data.gov.au/dataset/asgs2016/sectionofstate",
"asgs16_lga": "http://linked.data.gov.au/dataset/asgs2016/localgovernmentarea",
"asgs16_ced": "http://linked.data.gov.au/dataset/asgs2016/commonwealthelectoraldivision",
"asgs16_ssc": "http://linked.data.gov.au/dataset/asgs2016/statesuburb",
"asgs16_nrmr": "http://linked.data.gov.au/dataset/asgs2016/naturalresourcemanagementregion",
"geofabric2_1_1_contractedcatchment": "http://linked.data.gov.au/dataset/geofabric/contractedcatchment",
"geofabric2_1_1_riverregion": "http://linked.data.gov.au/dataset/geofabric/riverregion",
"geofabric2_1_1_awradrainagedivision": "http://linked.data.gov.au/dataset/geofabric/drainagedivision"
}
6 changes: 3 additions & 3 deletions api/model/geometry.py
@@ -10,7 +10,7 @@
import os
from .mappings import DatasetMappings


dataset_mappings = DatasetMappings()

GeometryView = View("GeometryView", "A profile of geometry.", ['text/html', 'application/json', 'text/turtle', 'text/plain'],
'text/html', namespace="http://example.org/def/geometryview")
@@ -22,7 +22,7 @@
'text/html', namespace="http://example.org/def/simplifiedgeomview")

class GeometryRenderer(Renderer):
DATASET_RESOURCE_BASE_URI_LOOKUP = DatasetMappings.DATASET_RESOURCE_BASE_URI_LOOKUP
#DATASET_RESOURCE_BASE_URI_LOOKUP = dataset_mappings.DATASET_RESOURCE_BASE_URI_LOOKUP
def __init__(self, request, uri, instance, geom_html_template, **kwargs):
self.views = {
'geometryview': GeometryView,
@@ -123,7 +123,7 @@ def export_rdf(self, model_view='geometryview', rdf_mime='text/turtle'):
def _find_resource_uris(self):
dataset = self.instance["dataset"]
id = self.instance["id"]
prefix = GeometryRenderer.DATASET_RESOURCE_BASE_URI_LOOKUP.get(dataset)
prefix = dataset_mappings.get_prefix(dataset)
if prefix is None:
return None
return "{0}/{1}".format(prefix, id)
45 changes: 17 additions & 28 deletions api/model/mappings.py
@@ -1,33 +1,22 @@
import json
import sys
from importlib_resources import files

class DatasetMappings():
DATASET_RESOURCE_BASE_URI_LOOKUP = {
"asgs16_sa1": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel1",
"asgs16_sa2": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel2",
"asgs16_sa3": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel3",
"asgs16_sa4": "http://linked.data.gov.au/dataset/asgs2016/statisticalarealevel4",
"asgs16_mb": "http://linked.data.gov.au/dataset/asgs2016/meshblock",
"asgs16_ste": "http://linked.data.gov.au/dataset/asgs2016/stateorterritory",
"asgs16_sua": "http://linked.data.gov.au/dataset/asgs2016/significanturbanarea",
"asgs16_ireg": "http://linked.data.gov.au/dataset/asgs2016/indigenousregion",
"asgs16_iloc": "http://linked.data.gov.au/dataset/asgs2016/indigenouslocation",
"asgs16_iare": "http://linked.data.gov.au/dataset/asgs2016/indigenousarea",
"asgs16_ra": "http://linked.data.gov.au/dataset/asgs2016/remotenessarea",
"asgs16_gccsa": "http://linked.data.gov.au/dataset/asgs2016/greatercapitalcitystatisticalarea",
"asgs16_ucl": "http://linked.data.gov.au/dataset/asgs2016/urbancentreandlocality",
"asgs16_sosr": "http://linked.data.gov.au/dataset/asgs2016/sectionofstaterange",
"asgs16_sos": "http://linked.data.gov.au/dataset/asgs2016/sectionofstate",
"asgs16_lga": "http://linked.data.gov.au/dataset/asgs2016/localgovernmentarea",
"asgs16_ced": "http://linked.data.gov.au/dataset/asgs2016/commonwealthelectoraldivision",
"asgs16_ssc": "http://linked.data.gov.au/dataset/asgs2016/statesuburb",
"asgs16_nrmr": "http://linked.data.gov.au/dataset/asgs2016/naturalresourcemanagementregion",
"geofabric2_1_1_contractedcatchment": "http://linked.data.gov.au/dataset/geofabric/contractedcatchment",
"geofabric2_1_1_riverregion": "http://linked.data.gov.au/dataset/geofabric/riverregion",
"geofabric2_1_1_awradrainagedivision": "http://linked.data.gov.au/dataset/geofabric/drainagedivision"
}
@classmethod
def find_resource_uri(cls, dataset_type, dataset_local_id):
prefix = cls.DATASET_RESOURCE_BASE_URI_LOOKUP.get(dataset_type)
class DatasetMappings:
DATASET_RESOURCE_BASE_URI_LOOKUP = {}

def __init__(self):
dict_lookup = {}
jsonfile = files('model').joinpath('default_mappings.json').read_text()
dict_lookup = json.loads(jsonfile)
self.DATASET_RESOURCE_BASE_URI_LOOKUP = dict_lookup

def find_resource_uri(self, dataset_type, dataset_local_id):
prefix = self.DATASET_RESOURCE_BASE_URI_LOOKUP.get(dataset_type)
if prefix is None:
return None
return "{0}/{1}".format(prefix, dataset_local_id)

def get_prefix(self, dataset_type):
return self.DATASET_RESOURCE_BASE_URI_LOOKUP.get(dataset_type)
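DatasetMappings now loads its lookup table from default_mappings.json at construction time instead of carrying a hard-coded class-level dict, so callers work with an instance rather than the old classmethod. A brief sketch of the two accessors; the import path and the local identifier are illustrative assumptions:

from model.mappings import DatasetMappings  # assumes the 'model' package is on the import path

mappings = DatasetMappings()  # reads default_mappings.json via importlib_resources

# Build a full resource URI from a dataset key and a (made-up) local id.
print(mappings.find_resource_uri("geofabric2_1_1_riverregion", "9400216"))
# -> http://linked.data.gov.au/dataset/geofabric/riverregion/9400216

# get_prefix() returns just the base URI, as used in GeometryRenderer._find_resource_uris()
print(mappings.get_prefix("asgs16_ssc"))
# -> http://linked.data.gov.au/dataset/asgs2016/statesuburb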

1 change: 0 additions & 1 deletion api/model/search_results.py
@@ -6,7 +6,6 @@
from rdflib import Graph, URIRef, RDF, RDFS, XSD, OWL, Namespace, Literal, BNode
import _config as config
import json
from .mappings import DatasetMappings


SearchResultsView = View("SearchResultsView", "A profile of search results view.", ['text/html', 'application/json'],
2 changes: 2 additions & 0 deletions api/requirements.txt
@@ -12,3 +12,5 @@ flask-cors
flask_swagger_ui
flask-restplus
Werkzeug
python-dotenv
importlib-resources
