Refactor codebase to use consistent double-quote strings
This commit standardizes string formatting across the codebase to use double-quote strings instead of a mixture of single and double quotes. The change does not affect functionality; PEP 8 leaves the choice of quote character open but recommends picking one convention and sticking to it, so this improves the readability and consistency of the code.
gabrielspadon committed Feb 28, 2024
1 parent c2a8a24 commit 936569d
Showing 19 changed files with 289 additions and 304 deletions.
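For illustration only (this snippet is not taken from the aisdb codebase): with the double-quote convention, Python strings that wrap SQL containing single-quoted literals need no escaping or quote mixing.

```python
# Illustrative only -- not code from this commit.
# Double-quoted Python strings let SQL's single-quoted literals stand as-is:
query = "SELECT name FROM sqlite_schema WHERE type='table' ORDER BY name;"

# The single-quote convention would require escaping the embedded quotes:
query_escaped = 'SELECT name FROM sqlite_schema WHERE type=\'table\' ORDER BY name;'

assert query == query_escaped  # same literal, cleaner spelling above
```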
2 changes: 1 addition & 1 deletion aisdb/database/decoder.py
@@ -328,7 +328,7 @@ def decode_msgs(filepaths, dbconn, source, vacuum=False, skip_checksum=False, ra
dbconn.execute("VACUUM")
elif isinstance(vacuum, str):
assert not os.path.isfile(vacuum)
dbconn.execute(f"VACUUM INTO {vacuum}")
dbconn.execute("VACUUM INTO ?", (vacuum,))
else:
raise ValueError(
"vacuum arg must be boolean or filepath string")
33 changes: 16 additions & 17 deletions aisdb/tests/create_testing_data.py
@@ -3,28 +3,27 @@
import numpy as np
from shapely.geometry import Polygon

from aisdb.gis import Domain
from aisdb import decode_msgs, DBConn
from aisdb.database.create_tables import (
sql_createtable_dynamic,
sql_createtable_static,
)
from aisdb import decode_msgs, DBConn

from aisdb.gis import Domain


def sample_dynamictable_insertdata(*, dbconn):
#db = DBConn()
# db = DBConn()
assert isinstance(dbconn, DBConn)
dbconn.execute(sql_createtable_static.format(month="200001"))
dbconn.execute(sql_createtable_dynamic.format(month="200001"))
dbconn.execute(
'INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702800, -60.994833, 47.434647238127695, -1, -1)'
"INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702800, -60.994833, 47.434647238127695, -1, -1)"
)
dbconn.execute(
'INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702820, -60.994833, 47.434647238127695, -1, -1)'
"INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702820, -60.994833, 47.434647238127695, -1, -1)"
)
dbconn.execute(
'INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702840, -60.994833, 47.434647238127695, -1, -1)'
"INSERT OR IGNORE INTO ais_200001_dynamic (mmsi, time, longitude, latitude, cog, sog) VALUES (000000001, 946702840, -60.994833, 47.434647238127695, -1, -1)"
)
dbconn.commit()
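As an aside (not part of this commit), the literal-valued INSERTs above could also be written with bound parameters; a standalone sketch with plain sqlite3, where the column types are assumed for the example:

```python
import sqlite3

# Standalone sketch; table schema is assumed for illustration.
conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE ais_200001_dynamic "
    "(mmsi INTEGER, time INTEGER, longitude REAL, latitude REAL, cog REAL, sog REAL)")
rows = [
    (1, 946702800, -60.994833, 47.434647238127695, -1, -1),
    (1, 946702820, -60.994833, 47.434647238127695, -1, -1),
    (1, 946702840, -60.994833, 47.434647238127695, -1, -1),
]
conn.executemany(
    "INSERT OR IGNORE INTO ais_200001_dynamic "
    "(mmsi, time, longitude, latitude, cog, sog) VALUES (?, ?, ?, ?, ?, ?)",
    rows)
conn.commit()
conn.close()
```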

@@ -60,27 +59,27 @@ def sample_gulfstlawrence_bbox():


def random_polygons_domain(count=10):
return Domain('testdomain',
return Domain("testdomain",
[{
'name': f'random_{i:03}',
'geometry': Polygon(zip(*sample_random_polygon()))
"name": f"random_{i:03}",
"geometry": Polygon(zip(*sample_random_polygon()))
} for i in range(count)])
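For context, each zone passed to Domain above is a dict pairing a name with a shapely geometry; a small illustrative example with made-up coordinates:

```python
from shapely.geometry import Polygon

# Illustrative zone matching the structure used in random_polygons_domain.
# The coordinates are invented for the example.
lons = [-64.0, -62.0, -62.0, -64.0]
lats = [46.0, 46.0, 48.0, 48.0]
zone = {"name": "example_zone", "geometry": Polygon(list(zip(lons, lats)))}
print(zone["geometry"].bounds)  # (minx, miny, maxx, maxy)
```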


def sample_database_file(dbpath):
''' test data for date 2021-11-01 '''
assert os.path.isdir(os.path.join(os.path.dirname(__file__), 'testdata'))
datapath_csv = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20210701.csv')
""" test data for date 2021-11-01 """
assert os.path.isdir(os.path.join(os.path.dirname(__file__), "testdata"))
datapath_csv = os.path.join(os.path.dirname(__file__), "testdata",
"test_data_20210701.csv")
# no static data in nm4
datapath_nm4 = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20211101.nm4')
datapath_nm4 = os.path.join(os.path.dirname(__file__), "testdata",
"test_data_20211101.nm4")
months = ["202107", "202111"]
with DBConn(dbpath) as dbconn:
decode_msgs(
dbconn=dbconn,
filepaths=[datapath_csv, datapath_nm4],
source='TESTING',
source="TESTING",
vacuum=False,
skip_checksum=True,
)
32 changes: 14 additions & 18 deletions aisdb/tests/test_00_decode.py
@@ -6,31 +6,27 @@


def test_decode_1day(tmpdir):
dbpath = os.path.join(tmpdir, 'test_decode_1day.db')
dbpath = os.path.join(tmpdir, "test_decode_1day.db")
testing_data_zip = os.path.join(os.path.dirname(__file__), "testdata", "test_data_20211101.nm4.zip")
testing_data_gz = os.path.join(os.path.dirname(__file__), "testdata", "test_data_20211101.nm4.gz")
testing_data_csv = os.path.join(os.path.dirname(__file__), "testdata", "test_data_20210701.csv")
testing_data_nm4 = os.path.join(os.path.dirname(__file__), "testdata", "test_data_20211101.nm4")
print("\n ---> ", dbpath)
testingdata_nm4 = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20211101.nm4')
testingdata_csv = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20210701.csv')
testingdata_gz = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20211101.nm4.gz')
testingdata_zip = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20211101.nm4.zip')
with DBConn(dbpath) as dbconn:

with DBConn(dbpath) as db_conn:
filepaths = [
testingdata_nm4, testingdata_csv, testingdata_gz, testingdata_zip
testing_data_nm4, testing_data_csv, testing_data_gz, testing_data_zip
]
dt = datetime.now()
decode_msgs(filepaths=filepaths,
dbconn=dbconn,
source='TESTING',
dbconn=db_conn,
source="TESTING",
vacuum=True)
decode_msgs(filepaths=filepaths,
dbconn=dbconn,
source='TESTING',
vacuum=dbpath + '.vacuum')
dbconn=db_conn,
source="TESTING",
vacuum=dbpath + ".vacuum")
delta = datetime.now() - dt
print(
f'sqlite total parse and insert time: {delta.total_seconds():.2f}s'
f"sqlite total parse and insert time: {delta.total_seconds():.2f}s"
)

31 changes: 14 additions & 17 deletions aisdb/tests/test_01_createtables.py
@@ -1,46 +1,44 @@
import os
import warnings

from aisdb.database.dbconn import DBConn, PostgresDBConn
from aisdb.database.decoder import decode_msgs
from aisdb.database.create_tables import (
sql_createtable_dynamic,
sql_createtable_static,
)

from aisdb.database.dbconn import DBConn
from aisdb.database.decoder import decode_msgs


def test_create_static_table(tmpdir):
dbpath = os.path.join(tmpdir, 'test_create_static_table.db')
dbpath = os.path.join(tmpdir, "test_create_static_table.db")
with DBConn(dbpath) as dbconn:
dbconn.execute(sql_createtable_dynamic.format("202009"))


def test_create_dynamic_table(tmpdir):
dbpath = os.path.join(tmpdir, 'test_create_dynamic_table.db')
dbpath = os.path.join(tmpdir, "test_create_dynamic_table.db")
with DBConn(dbpath) as dbconn:
dbconn.execute(sql_createtable_dynamic.format("202009"))


def test_create_static_aggregate_table(tmpdir):
warnings.filterwarnings('error')
dbpath = os.path.join(tmpdir, 'test_create_static_aggregate_table.db')
testingdata_csv = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20210701.csv')
warnings.filterwarnings("error")
dbpath = os.path.join(tmpdir, "test_create_static_aggregate_table.db")
testingdata_csv = os.path.join(os.path.dirname(__file__), "testdata",
"test_data_20210701.csv")
with DBConn(dbpath) as dbconn:
decode_msgs([testingdata_csv], dbconn=dbconn, source='TESTING')
decode_msgs([testingdata_csv], dbconn=dbconn, source="TESTING")
dbconn.aggregate_static_msgs(["202107"])


def test_create_from_CSV(tmpdir):
testingdata_csv = os.path.join(os.path.dirname(__file__), 'testdata',
'test_data_20210701.csv')
dbpath = os.path.join(tmpdir, 'test_create_from_CSV.db')
testingdata_csv = os.path.join(os.path.dirname(__file__), "testdata",
"test_data_20210701.csv")
dbpath = os.path.join(tmpdir, "test_create_from_CSV.db")
with DBConn(dbpath) as dbconn:
decode_msgs(
dbconn=dbconn,
filepaths=[testingdata_csv],
source='TESTING',
source="TESTING",
vacuum=False,
)

@@ -50,7 +48,6 @@ def test_create_from_CSV(tmpdir):
"SELECT name FROM sqlite_schema "
"WHERE type='table' ORDER BY name;")
rows = cur.fetchall()
temp = [row['name'] for row in rows]
temp = [row["name"] for row in rows]
print(temp)
assert len(temp) == 5
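Indexing rows by column name, as row["name"] does above, relies on a sqlite3.Row row factory; a minimal standalone sketch outside the aisdb DBConn wrapper (sqlite_schema requires SQLite 3.33+, older versions use sqlite_master; the table name is invented here):

```python
import sqlite3

# Standalone sketch; the table name is invented for illustration.
conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # enables row["name"] style access
conn.execute("CREATE TABLE ais_202107_dynamic (mmsi INTEGER)")
cur = conn.execute(
    "SELECT name FROM sqlite_schema "  # sqlite_master on SQLite < 3.33
    "WHERE type='table' ORDER BY name;")
tables = [row["name"] for row in cur.fetchall()]
print(tables)
conn.close()
```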

35 changes: 15 additions & 20 deletions aisdb/tests/test_02_dbqry.py
@@ -8,22 +8,19 @@
DBConn,
DBQuery,
Domain,
PostgresDBConn,
sqlfcn,
sqlfcn_callbacks,
)
from aisdb.database.create_tables import sql_createtable_dynamic
from aisdb.database.decoder import decode_msgs
from aisdb.tests.create_testing_data import (
sample_database_file,
sample_gulfstlawrence_bbox,
)
from aisdb.track_gen import TrackGen


def test_query_emptytable(tmpdir):
warnings.filterwarnings('error')
dbpath = os.path.join(tmpdir, 'test_query_emptytable.db')
warnings.filterwarnings("error")
dbpath = os.path.join(tmpdir, "test_query_emptytable.db")
try:
with DBConn(dbpath) as dbconn:
q = DBQuery(
@@ -32,23 +29,22 @@ def test_query_emptytable(tmpdir):
end=datetime(2021, 1, 7),
callback=sqlfcn_callbacks.in_timerange_validmmsi,
)
dbconn.execute(sql_createtable_dynamic.format('202101'))
dbconn.execute(sql_createtable_dynamic.format("202101"))
rows = q.gen_qry(reaggregate_static=True)
assert list(rows) == []
except UserWarning as warn:
assert 'No static data for selected time range!' in warn.args[0]
assert "No static data for selected time range!" in warn.args[0]
except Exception as err:
raise err


def test_prepare_qry_domain(tmpdir):

testdbpath = os.path.join(tmpdir, 'test_prepare_qry_domain.db')
testdbpath = os.path.join(tmpdir, "test_prepare_qry_domain.db")
months = sample_database_file(testdbpath)
start = datetime(int(months[0][0:4]), int(months[0][4:6]), 1)
end = start + timedelta(weeks=4)
z1 = Polygon(zip(*sample_gulfstlawrence_bbox()))
domain = Domain('gulf domain', zones=[{'name': 'z1', 'geometry': z1}])
domain = Domain("gulf domain", zones=[{"name": "z1", "geometry": z1}])
with DBConn(testdbpath) as aisdatabase:
rowgen = DBQuery(
dbconn=aisdatabase,
@@ -61,22 +57,22 @@


def test_sql_query_strings(tmpdir):
testdbpath = os.path.join(tmpdir, 'test_sql_query_strings.db')
testdbpath = os.path.join(tmpdir, "test_sql_query_strings.db")
months = sample_database_file(testdbpath)
start = datetime(int(months[0][0:4]), int(months[0][4:6]), 1)
end = start + timedelta(weeks=4)
z1 = Polygon(zip(*sample_gulfstlawrence_bbox()))
domain = Domain('gulf domain', zones=[{'name': 'z1', 'geometry': z1}])
domain = Domain("gulf domain", zones=[{"name": "z1", "geometry": z1}])

with DBConn(testdbpath) as aisdatabase:
for callback in [
sqlfcn_callbacks.in_bbox,
sqlfcn_callbacks.in_bbox_time,
sqlfcn_callbacks.in_bbox_time_validmmsi,
sqlfcn_callbacks.in_time_bbox_inmmsi,
sqlfcn_callbacks.in_timerange,
sqlfcn_callbacks.in_timerange_hasmmsi,
sqlfcn_callbacks.in_timerange_validmmsi,
sqlfcn_callbacks.in_bbox,
sqlfcn_callbacks.in_bbox_time,
sqlfcn_callbacks.in_bbox_time_validmmsi,
sqlfcn_callbacks.in_time_bbox_inmmsi,
sqlfcn_callbacks.in_timerange,
sqlfcn_callbacks.in_timerange_hasmmsi,
sqlfcn_callbacks.in_timerange_validmmsi,
]:
rowgen = DBQuery(
dbconn=aisdatabase,
@@ -88,4 +84,3 @@
mmsis=[316000000, 316000001],
).gen_qry(fcn=sqlfcn.crawl_dynamic_static)
next(rowgen)

36 changes: 18 additions & 18 deletions aisdb/tests/test_02_sqlfcn.py
@@ -18,7 +18,7 @@


def test_dynamic(tmpdir):
dbpath = os.path.join(tmpdir, 'test_sqlfcn_dynamic.db')
dbpath = os.path.join(tmpdir, "test_sqlfcn_dynamic.db")
month = "202105"
callback = sqlfcn_callbacks.in_time_bbox_validmmsi
txt = sqlfcn._dynamic(dbpath=dbpath,
@@ -29,7 +29,7 @@ def test_static(tmpdir):


def test_static(tmpdir):
dbpath = os.path.join(tmpdir, 'test_sqlfcn_static.db')
dbpath = os.path.join(tmpdir, "test_sqlfcn_static.db")
month = "202105"
txt = sqlfcn._static(dbpath=dbpath, month=month)
print(txt)
@@ -42,8 +42,8 @@ def test_leftjoin():


def test_crawl(tmpdir):
dbpath = os.path.join(tmpdir, 'test_sqlfcn_crawl.db')
months = ['202105']
dbpath = os.path.join(tmpdir, "test_sqlfcn_crawl.db")
months = ["202105"]
callback = sqlfcn_callbacks.in_time_bbox_validmmsi
txt = sqlfcn.crawl_dynamic_static(dbpath=dbpath,
months=months,
@@ -58,22 +58,22 @@


def test_callbacks(tmpdir):
dbpath = os.path.join(tmpdir, 'test_sqlfcn_callbacks.db')
months = ['202105']
dbpath = os.path.join(tmpdir, "test_sqlfcn_callbacks.db")
months = ["202105"]
callback = sqlfcn_callbacks.in_time_bbox_validmmsi
for callback in [
sqlfcn_callbacks.in_bbox,
sqlfcn_callbacks.in_bbox_time,
sqlfcn_callbacks.in_bbox_time_validmmsi,
sqlfcn_callbacks.in_time_bbox,
sqlfcn_callbacks.in_time_bbox_hasmmsi,
sqlfcn_callbacks.in_time_bbox_inmmsi,
sqlfcn_callbacks.in_time_bbox_validmmsi,
sqlfcn_callbacks.in_time_mmsi,
sqlfcn_callbacks.in_timerange,
sqlfcn_callbacks.in_timerange_hasmmsi,
sqlfcn_callbacks.in_timerange_inmmsi,
sqlfcn_callbacks.in_timerange_validmmsi,
sqlfcn_callbacks.in_bbox,
sqlfcn_callbacks.in_bbox_time,
sqlfcn_callbacks.in_bbox_time_validmmsi,
sqlfcn_callbacks.in_time_bbox,
sqlfcn_callbacks.in_time_bbox_hasmmsi,
sqlfcn_callbacks.in_time_bbox_inmmsi,
sqlfcn_callbacks.in_time_bbox_validmmsi,
sqlfcn_callbacks.in_time_mmsi,
sqlfcn_callbacks.in_timerange,
sqlfcn_callbacks.in_timerange_hasmmsi,
sqlfcn_callbacks.in_timerange_inmmsi,
sqlfcn_callbacks.in_timerange_validmmsi,
]:
box_x = sorted(np.random.random(2) * 360 - 180)
box_y = sorted(np.random.random(2) * 180 - 90)