Skip to content

Commit

Permalink
Merge pull request #146 from antoine-de/limit_shrink
Browse files Browse the repository at this point in the history
Limit the number of `shrink_to_fit` calls while draining stop times
  • Loading branch information
antoine-de authored Oct 25, 2023
2 parents 70de399 + 9ed8b94 commit e625570
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 2 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[package]
description = "Read GTFS (public transit timetables) files"
name = "gtfs-structures"
version = "0.36.0"
version = "0.36.1"
authors = ["Tristram Gräbener <[email protected]>", "Antoine Desbordes <[email protected]>"]
repository = "https://github.com/rust-transit/gtfs-structure"
license = "MIT"
Expand Down
10 changes: 9 additions & 1 deletion src/gtfs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,10 @@ fn to_calendar_dates(cd: Vec<CalendarDate>) -> HashMap<String, Vec<CalendarDate>
res
}

// Number of stop times to `pop` from `raw_stop_times` before calling `shrink_to_fit`
// to release the freed capacity and reduce the peak memory footprint.
// Calling `shrink_to_fit` after every pop would be quadratic (each call may reallocate
// and copy), so it is throttled to once per this many pops.
// Hardcoded to what seems a sensible value, but if needed we could make this a parameter;
// feel free to open an issue if that would help.
const NB_STOP_TIMES_BEFORE_SHRINK: usize = 1_000_000;

fn create_trips(
raw_trips: Vec<RawTrip>,
mut raw_stop_times: Vec<RawStopTime>,
Expand All @@ -311,15 +315,19 @@ fn create_trips(
frequencies: vec![],
}));

let mut st_idx = 0;
while let Some(s) = raw_stop_times.pop() {
st_idx += 1;
let trip = &mut trips
.get_mut(&s.trip_id)
.ok_or_else(|| Error::ReferenceError(s.trip_id.to_string()))?;
let stop = stops
.get(&s.stop_id)
.ok_or_else(|| Error::ReferenceError(s.stop_id.to_string()))?;
trip.stop_times.push(StopTime::from(s, Arc::clone(stop)));
raw_stop_times.shrink_to_fit();
if st_idx % NB_STOP_TIMES_BEFORE_SHRINK == 0 {
raw_stop_times.shrink_to_fit();
}
}

for trip in &mut trips.values_mut() {
Expand Down

0 comments on commit e625570

Please sign in to comment.