This repository has been archived by the owner on Nov 4, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge remote-tracking branch 'origin/develop' into font2
- Loading branch information
Showing
12 changed files
with
1,112 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
# Travis CI configuration for the Node/Expo frontend.
language: node_js
node_js:
  # Pinned Node version used for all stages.
  - 10.17.0
env:
  matrix:
    # Encrypted Travis secret (presumably the Expo credentials used by the
    # commented-out deploy stages below — confirm against Travis settings).
    secure: gSb+5hzjOEsBsrJRIb8x2pQF/Y/yB4b1d1OGW2BkXCLECwFfk/UCCpE1LIWLeK2XJpuF9H/yewZFc2lrgYsM1NI1Wn33BcOFWuvCq3uyLFlpdiB5jKNXvLYA6xmudwbvWPh9AjubN2IJplatF2EAMyf1JjgOX+M3lEWOGqiaOYMLjP+8PphInhbscMDWJb2ni601Q+iBDtEkhwLPWVIzz0gY3EyXta0YbtbiWvjhsFE9NYf2MuB/s9xb3UK09jq2bBtD4M80+ppzYUTmq8s3GzTPqpPm0vww67xkMUPA4tkgygFNwYYETeytLL9bdyDY8PitJfGn26qoZTPSy6z2TOJEmEVhf077Tk25FrJcIjFltQ3Nne/NYczTwFqUrrXNoLPgplm4zMy3LZZKweL11juMmbXmyTZ3fywGqJ8RwKPEKO3Qeuv0xtsPiAc1qmh6bC5Y7E65G5rDj77ei+7BFc12nSjqBCa0rLx1iC47fzeGTwWKCIb6A6tJbr6mRVkRbcP7M9rQ/UMWYskrFLgGbvrgHX0Hn28uHS76CNBAwvh0eQk3iG2iOIZ9GVC32Lh66daLL9noDhTiDGUdmp6q7ofG8jtNDPvnqF6CXgUBgpE8GGDd6paTmHT4rIiKLUI42sQwOzfIhwt7exckspqyZvvZbnkYg/Yk7bR8vTHLZ0M=
cache:
  # Cache npm downloads and the jest cache between builds.
  directories:
    - "~/.npm"
    - ".jest"
before_install:
  - npm install -g npm@latest
  - npm install -g expo-cli
stages:
  - build
  - test
# Deploy stages are disabled for now; re-enable with the conditions below.
#- name: deploy staging
#  if: branch = develop
#- name: deploy production
#  if: branch = master
jobs:
  include:
    - stage: test
      script:
        - cd frontend
        - npm ci
        - npx jest --ci --passWithNoTests
#    - stage: deploy staging
#      script:
#        - cd frontend
#        - npm ci
#        - npx expo login -u $EXPO_USERNAME -p $EXPO_PASSWORD
#        - npx expo publish --non-interactive --release-channel staging
#    - stage: deploy production
#      script:
#        - cd frontend
#        - npm ci
#        - npx expo login -u $EXPO_USERNAME -p $EXPO_PASSWORD
#        - npx expo publish --non-interactive --release-channel production
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,10 @@ | ||
# software_engineering_g2 | ||
Repository for Software Engineering Group 2 | ||
|
||
## CI Statuses | ||
|
||
| | Master | Develop | | ||
| ---------- | ------ | ------- | | ||
| Build | [![Build Status](https://travis-ci.com/markscamilleri/software_engineering_g2.svg?branch=master)](https://travis-ci.com/markscamilleri/software_engineering_g2) | [![Build Status](https://travis-ci.com/markscamilleri/software_engineering_g2.svg?branch=develop)](https://travis-ci.com/markscamilleri/software_engineering_g2) | | ||
| Deepscan | [![DeepScan grade](https://deepscan.io/api/teams/5858/projects/7696/branches/81777/badge/grade.svg)](https://deepscan.io/dashboard#view=project&tid=5858&pid=7696&bid=81777) | [![DeepScan grade](https://deepscan.io/api/teams/5858/projects/7696/branches/81775/badge/grade.svg)](https://deepscan.io/dashboard#view=project&tid=5858&pid=7696&bid=81775) | | ||
| CodeFactor | [![CodeFactor](https://www.codefactor.io/repository/github/markscamilleri/software_engineering_g2/badge/master)](https://www.codefactor.io/repository/github/markscamilleri/software_engineering_g2/overview/master) | [![CodeFactor](https://www.codefactor.io/repository/github/markscamilleri/software_engineering_g2/badge/develop)](https://www.codefactor.io/repository/github/markscamilleri/software_engineering_g2/overview/develop) | |
Binary file removed
BIN
-17.4 KB
Project_Plan/Advanced Software Engineering Project Plan document.docx
Binary file not shown.
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,176 @@ | ||
import logging | ||
import asyncio | ||
import re | ||
from threading import Thread | ||
from typing import Optional, Callable, Dict, Any, List, Iterable | ||
|
||
import janus | ||
import mysql.connector | ||
import mysql.connector.pooling | ||
|
||
from exception import InvalidArgumentException, ProgramClosingException, SingletonException | ||
|
||
# Size of the MySQL connection pool AND the number of consumer coroutines
# (one consumer per pooled connection — see SQLQueue.__init__).
POOL_SIZE = 5
||
class SQLQueue:
    """Per-connection-arguments singleton that runs SQL against MySQL.

    Synchronous SELECTs run on a dedicated "immediate" connection, while other
    queries are queued and drained by POOL_SIZE asyncio consumer coroutines
    running on a dedicated background thread, each drawing a connection from a
    MySQLConnectionPool.
    """

    # One instance per distinct connection-argument set, keyed by str(kwargs).
    __instances = {}

    @staticmethod
    def get_instance(**database_args) -> 'SQLQueue':
        """Static access method: return the instance for these connection
        arguments, creating it on first use."""
        logger = logging.getLogger(__name__)
        if str(database_args) not in SQLQueue.__instances:
            logger.info("Creating a new singleton instance")
            # The constructor registers the new instance in __instances.
            SQLQueue(**database_args)
        return SQLQueue.__instances[str(database_args)]

    def __init__(self, **database_args):
        """Virtually private constructor — use get_instance().

        :param database_args: keyword arguments forwarded to
            mysql.connector.connect() / MySQLConnectionPool.
        :raises SingletonException: if an instance already exists for these
            connection arguments.
        """
        # FIX: the original annotated __init__ with `-> 'SQLQueue'` (it must
        # return None) and placed its docstrings after a statement, where they
        # are not docstrings at all.
        logger = logging.getLogger(__name__)
        if str(database_args) in SQLQueue.__instances:
            logger.error("Attempted to create another instance of a singleton class")
            raise SingletonException("This class is a singleton! Please use get_instance()")

        logger.debug("Setting database args")
        self.__database_args = database_args
        logger.debug(
            "Database args set: {} (passwords omitted)".format(
                {k: v for k, v in self.__database_args.items() if not k == 'password'}))
        logger.debug("Creating immediate connection")
        self.__immediate_connection = mysql.connector.connect(**database_args)
        logger.debug(f"Immediate Connection opened: {self.__immediate_connection}")
        logger.debug("Creating Asynchronous connection pool")
        self.__other_connection_pool = mysql.connector.pooling.MySQLConnectionPool(pool_name="other_queries",
                                                                                   pool_size=POOL_SIZE,
                                                                                   **database_args)
        logger.debug(f"Connection pool created: {self.__other_connection_pool}")
        logger.debug("Setting Connections Open to 0")
        self.__other_connections_open = 0
        logger.debug(f"Connections Open = {self.__other_connections_open}")
        logger.debug("Setting accepting flag to True")
        self.__async_query_queue_accepting = True
        logger.debug(f"Accepting flag = {self.__async_query_queue_accepting}")
        logger.debug("Setting running flag to True")
        self.__async_query_queue_runner_running = True
        logger.debug(f"Running flag = {self.__async_query_queue_runner_running}")

        logger.debug("Creating the consumer coroutines")
        # One consumer coroutine per pooled connection.
        self.__consumers = [self.__query_queue_consumer() for _ in range(POOL_SIZE)]

        logger.debug(f"Consumer coroutines created: {self.__consumers}")
        logger.debug("Creating event loop for coroutines")
        self.__async_query_loop = asyncio.new_event_loop()
        logger.debug(f"Event loop created: {self.__async_query_loop}")
        logger.debug("Creating janus Queue")
        # NOTE(review): janus.Queue(loop=...) is deprecated/removed in newer
        # janus releases — confirm the pinned janus version accepts it.
        self.__query_queue = janus.Queue(loop=self.__async_query_loop)
        logger.debug(f"Janus Queue created: {self.__query_queue}")
        logger.debug("Creating async thread")
        self.__async_thread = Thread(target=SQLQueue.__start_loop, args=(self.__async_query_loop, self.__consumers))
        logger.debug(f"Async thread created: {self.__async_thread}")
        logger.debug("Starting async thread")
        self.__async_thread.start()

        SQLQueue.__instances[str(database_args)] = self
        logger.debug("SQLQueue instance initialized and added")

    @staticmethod
    def __start_loop(loop, consumers):
        # Runs on the background thread: adopt the loop and drive every
        # consumer coroutine until they all complete.
        asyncio.set_event_loop(loop)
        loop.run_until_complete(asyncio.gather(*consumers))

    def select(self, query: str, parameters: Iterable = None, fetch_all: bool = True) -> Any:
        """
        This is a blocking query to run a select query immediately.
        :param query: The SELECT query to run
        :param parameters: The parameters for this query
        :param fetch_all: boolean type, defaults to `True`, specifying if fetchall() or fetchone() should be used
        :return: a list of row dicts when fetch_all, otherwise a single row dict (or None)
        :raises InvalidArgumentException: if the query does not start with SELECT
        """
        # FIX: the original return annotation was Dict[str, Any]; fetchall()
        # returns a list of rows, so the annotation was wrong.
        if not re.match('select', query, re.I):
            raise InvalidArgumentException("Only SELECT queries can be placed here. Use execute() for other queries")

        cursor = self.__immediate_connection.cursor(dictionary=True, buffered=True)
        try:
            cursor.execute(query, parameters)
            if fetch_all:
                return cursor.fetchall()
            return cursor.fetchone()
        finally:
            # FIX: the cursor was previously never closed (resource leak).
            cursor.close()

    def execute(self, query: str, parameters: Iterable = None,
                callback: Optional[Callable[[List[Dict[str, Any]]], None]] = lambda *args, **kwargs: None) -> None:
        """
        Places a query in the queue
        :param query: Query to run
        :param parameters: Query Parameters
        :param callback: Optional function to run once the query is complete.
        :return: Nothing
        :raises ProgramClosingException: if the queue is no longer accepting work
        """
        logger = logging.getLogger(__name__)
        if not self.__async_query_queue_accepting:
            logger.error("Tried to queue a query when the queue is closed")
            logger.debug(f"Query \"{query}\" with parameters {parameters} and callback {callback}")
            raise ProgramClosingException("The queue has closed")
        logger.debug(f"Queuing query \"{query}\" with parameters {parameters} and callback {callback}")
        self.__query_queue.sync_q.put_nowait({'query': query, 'parameters': parameters, 'callback': callback})
        logger.debug("Query is queued for execution")

    def execute_with_result(self, query: str, parameters: Iterable = None):
        """
        Blocking call: run a query on the immediate connection and return every row.
        """
        logger = logging.getLogger(__name__)

        # FIX: the original logged via the root `logging` module here instead
        # of this module's logger.
        logger.debug(f"execute_with_result: query: {query}, parameters: {parameters}")
        cursor = self.__immediate_connection.cursor(dictionary=True, buffered=True)
        try:
            logger.debug(f"Executing the query {query} with parameters {parameters} ")
            cursor.execute(query, parameters)
            result = cursor.fetchall()
            logger.debug(f"Result: {result}")
        finally:
            # FIX: close the cursor (previously leaked).
            cursor.close()

        return result

    async def __query_queue_consumer(self):
        # Waits until there's a free connection
        logger = logging.getLogger(__name__)

        while self.__async_query_queue_runner_running:
            query = await self.__query_queue.async_q.get()
            # FIX: `parameters` is typically a list (unhashable), so hashing
            # the raw values could raise TypeError; hash string forms instead.
            # This id is only used to correlate log lines.
            query_hash = hash((query['query'], str(query['parameters']), str(query['callback'])))
            logger.debug(
                f"{query_hash}: Executing the query {query['query']} with parameters {query['parameters']}")

            self.__other_connections_open += 1
            connection = self.__other_connection_pool.get_connection()

            cursor = connection.cursor(dictionary=True, buffered=True)
            cursor.execute(query['query'], query['parameters'])

            result = cursor.fetchall()
            logger.debug(f"{query_hash}: result: {result}")
            connection.commit()
            # FIX: close the cursor before returning the connection to the pool.
            cursor.close()
            connection.close()

            self.__other_connections_open -= 1
            logger.debug(f"{query_hash}: Connection closed. Running callback")
            query['callback'](result)

            # FIX: without task_done() the sync_q.join() in __del__ would block
            # forever once any query had been queued.
            self.__query_queue.async_q.task_done()

            logger.debug(f"{query_hash}: Finished processing")

    def __del__(self):
        """Best-effort shutdown: stop accepting work, drain the queue, close."""
        logger = logging.getLogger(__name__)

        logger.info("Closing SQLQueue")
        logger.debug("Not accepting new queries")
        self.__async_query_queue_accepting = False
        logger.debug("Closing immediate connection")
        self.__immediate_connection.close()
        logger.info("Waiting for tasks to finish")
        self.__query_queue.sync_q.join()
        logger.debug("Terminating Consumers")
        # NOTE(review): consumers blocked in async_q.get() will not observe
        # this flag until another item arrives — confirm shutdown behaviour.
        self.__async_query_queue_runner_running = False
        logger.info("SQLQueue instance closed")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
class InvalidArgumentException(Exception):
    """Signals that a caller supplied an unacceptable argument."""
|
||
|
||
class ProgramClosingException(Exception):
    """Signals that work was submitted while the program is shutting down."""
|
||
|
||
class SingletonException(Exception):
    """Signals an attempt to construct a second instance of a singleton."""
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
from flask import Flask, request, jsonify | ||
import postcodes_io_api | ||
import database | ||
|
||
# Flask WSGI application object; the route handlers below register on it.
app = Flask(__name__)
||
@app.route('/getHouses', methods=['POST'])
def postcodesIO():
    """Return house/transaction rows for postcodes near the posted coordinates.

    Expects a JSON body with keys 'lat', 'lon', 'radius', and 'limit', and
    responds with a JSON list of rows joining postcodes, houses, transactions
    and property types.
    """
    frontData = request.get_json()
    latitude = frontData['lat']
    longitude = frontData['lon']
    radius = frontData['radius']
    houseLimit = frontData['limit']
    # BUG FIX: callAPI's signature is (lat, lon, rad, lim) but the original
    # call passed (latitude, longitude, houseLimit, radius), silently swapping
    # the radius and result-limit arguments.
    listOfPostcodes = callAPI(latitude, longitude, radius, houseLimit)
    # ROBUSTNESS FIX: "IN ()" is invalid SQL, so short-circuit when the API
    # returns no nearby postcodes.
    if not listOfPostcodes:
        return jsonify([])
    # One %s placeholder per postcode keeps the query parameterized
    # (no string-built SQL, no injection).
    varseq = ','.join(['%s'] * len(listOfPostcodes))

    statement = (
        f"""SELECT post.id, house.paon, house.saon, post.street, post.postcode, props.initial, trans.price
        FROM postcodes AS post
        INNER JOIN houses AS house
        ON house.postcode_id = post.id
        INNER JOIN transactions AS trans
        ON trans.house_id = house.id
        INNER JOIN property_types AS props
        ON props.id = house.property_type_id
        WHERE post.postcode IN ({varseq});"""
    )

    # NOTE(review): `db` is only bound when the module runs as a script (see
    # the __main__ guard) — confirm how this handler is deployed.
    result = db.select(query=statement, parameters=listOfPostcodes)

    return jsonify(result)
|
||
|
||
def callAPI(lat, lon, rad, lim):
    """Return the postcodes nearest to (lat, lon) from the postcodes.io API.

    :param lat: latitude of the search centre
    :param lon: longitude of the search centre
    :param rad: search radius passed to the API
    :param lim: maximum number of postcodes to return
    :return: list of postcode strings
    """
    client = postcodes_io_api.Api(debug_http=True)
    response = client.get_nearest_postcodes_for_coordinates(
        latitude=lat, longitude=lon, limit=lim, radius=rad)
    # Keep only the postcode string from each result entry.
    return [entry["postcode"] for entry in response["result"]]
|
||
|
||
if __name__ == '__main__':
    # SECURITY/BUG FIX: the original read `password={change on develop}`,
    # which is not valid Python (a redacted placeholder) and invited a
    # hard-coded credential. Read the password from the environment instead.
    import os

    db = database.SQLQueue.get_instance(
        host="34.89.126.252", user="root",
        password=os.environ["DB_PASSWORD"],
        database="price_paid_data")
    # Listen on all interfaces on the standard HTTP port.
    app.run(host='0.0.0.0', port=80)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
# Database Scripts | ||
`price_paid_data.sql` contains the database definition for the `price_paid_data` schema in the database. Below is the schema diagram: | ||
|
||
![Database Schema](schema.svg) |
Oops, something went wrong.