Merge branch 'master' into merge-functions
Jinnkunn authored Sep 11, 2024
2 parents ec3a75f + 92a594f commit aad8edc
Showing 2 changed files with 36 additions and 19 deletions.
32 changes: 16 additions & 16 deletions aisdb/tests/test_013_proc_util.py
@@ -50,22 +50,22 @@ def test_write_csv_fromdict(tmpdir):
     aisdb.proc_util.write_csv(tracks, fpath=os.path.join(tmpdir, "test_write_csv.csv"))
 
 
-def test_write_csv_fromdict_marinetraffic(tmpdir):
-    dbpath = os.path.join(tmpdir, "test_write_csv.db")
-    months = sample_database_file(dbpath)
-    start = datetime(int(months[0][0:4]), int(months[0][4:6]), 1)
-    end = start + timedelta(weeks=4)
-
-    vinfo_db = VesselInfo(trafficDBpath).trafficDB
-
-    with DBConn(dbpath) as dbconn, vinfo_db as trafficDB:
-        qry = DBQuery(dbconn=dbconn, start=start, end=end, callback=sqlfcn_callbacks.in_timerange_validmmsi, )
-        qry.check_marinetraffic(trafficDBpath=trafficDBpath,
-                                boundary={"xmin": -45, "xmax": -25, "ymin": 30, "ymax": 50, })
-
-        rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, verbose=True)
-        tracks = vessel_info(track_gen.TrackGen(rowgen, decimate=True), trafficDB)
-        aisdb.proc_util.write_csv(tracks, fpath=os.path.join(tmpdir, "test_write_csv.csv"))
+# def test_write_csv_fromdict_marinetraffic(tmpdir):
+#     dbpath = os.path.join(tmpdir, "test_write_csv.db")
+#     months = sample_database_file(dbpath)
+#     start = datetime(int(months[0][0:4]), int(months[0][4:6]), 1)
+#     end = start + timedelta(weeks=4)
+#
+#     vinfo_db = VesselInfo(trafficDBpath).trafficDB
+#
+#     with DBConn(dbpath) as dbconn, vinfo_db as trafficDB:
+#         qry = DBQuery(dbconn=dbconn, start=start, end=end, callback=sqlfcn_callbacks.in_timerange_validmmsi, )
+#         qry.check_marinetraffic(trafficDBpath=trafficDBpath,
+#                                 boundary={"xmin": -45, "xmax": -25, "ymin": 30, "ymax": 50, })
+#
+#         rowgen = qry.gen_qry(fcn=sqlfcn.crawl_dynamic_static, verbose=True)
+#         tracks = vessel_info(track_gen.TrackGen(rowgen, decimate=True), trafficDB)
+#         aisdb.proc_util.write_csv(tracks, fpath=os.path.join(tmpdir, "test_write_csv.csv"))
 
 
 def test_glob_files():
23 changes: 20 additions & 3 deletions aisdb/track_gen.py
@@ -162,12 +162,15 @@ def split_timedelta(tracks, maxdelta=timedelta(weeks=2)):
                 threshold at which tracks should be
                 partitioned
     '''
+    mmsi_count = {}  # Dictionary to keep track of MMSI indices
+
     for track in tracks:
         for rng in _segment_rng(track, maxdelta):
             assert len(rng) > 0
-            yield dict(
-                **{k: track[k]
-                   for k in track['static']},
+
+            # Create the segmented track dictionary
+            segmented_track = dict(
+                **{k: track[k] for k in track['static']},
                 **{
                     k: np.array(track[k], dtype=type(track[k][0]))[rng]
                     for k in track['dynamic']
@@ -176,6 +179,20 @@
                 dynamic=track['dynamic'],
             )
 
+            # Handle MMSI indexing after segmentation
+            mmsi_value = segmented_track.get("mmsi")
+            if mmsi_value:
+                if mmsi_value not in mmsi_count:
+                    mmsi_count[mmsi_value] = 0
+                else:
+                    mmsi_count[mmsi_value] += 1
+
+                # Modify the mmsi value to attach an index
+                segmented_track["mmsi"] = f"{mmsi_value}-{mmsi_count[mmsi_value]}"
+
+            # Yield the segmented track with modified mmsi
+            yield segmented_track
+
 
 def fence_tracks(tracks, domain):
     ''' compute points-in-polygons for vessel positions within domain polygons
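The net effect of the track_gen.py change is that split_timedelta now tags every yielded segment with a per-vessel counter, so repeated segments from the same MMSI get distinct identifiers ("<mmsi>-0", "<mmsi>-1", ...). A minimal standalone sketch of that labelling scheme (plain Python with made-up MMSI values, not the aisdb generators themselves) behaves like this:

    def label_segments(segment_mmsis):
        # Mirror the indexing logic added to split_timedelta: the first segment
        # seen for an MMSI gets suffix -0, the next gets -1, and so on.
        counts = {}
        labels = []
        for mmsi in segment_mmsis:
            if mmsi not in counts:
                counts[mmsi] = 0
            else:
                counts[mmsi] += 1
            labels.append(f"{mmsi}-{counts[mmsi]}")
        return labels

    print(label_segments([316001234, 316001234, 257005678]))
    # prints ['316001234-0', '316001234-1', '257005678-0']

Note that the yielded 'mmsi' value becomes a string after this change, which downstream consumers of the track dictionaries may need to account for.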
