mirror of https://github.com/a-b-street/abstreet.git
synced 2024-12-29 01:13:53 +03:00
switch to a real gtfs parser. should help towards #27 -- I was
hardcoding CSV indices for one particular GTFS feed
This commit is contained in:
parent 9aa4fda837
commit adb2a970dd
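A note on the motivation: GTFS fixes the column *names* in each `.txt` file but not their order, so indexing rows by position (the removed `rec[0]`, `rec[5]` accesses further down) only works for feeds whose columns happen to match one particular layout. A minimal sketch of the difference, using the `csv` crate that this commit removes; `Stop` here is a hypothetical local struct for illustration, not `transitfeed::Stop`:

```rust
use serde_derive::Deserialize;

// Hypothetical record type: serde matches fields against the header row of
// stops.txt by *name*, so the column order of a particular feed is irrelevant.
#[derive(Debug, Deserialize)]
struct Stop {
    stop_id: String,
    stop_lat: f64,
    stop_lon: f64,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut reader = csv::Reader::from_path("stops.txt")?;

    // Fragile (old style): assumes stop_lon is always column 5.
    //   let lon: f64 = rec[5].parse()?;

    // Robust (what a real GTFS parser does): look fields up by header name.
    for rec in reader.deserialize::<Stop>() {
        let stop = rec?;
        println!("{}: ({}, {})", stop.stop_id, stop.stop_lon, stop.stop_lat);
    }
    Ok(())
}
```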
@@ -41,7 +41,7 @@ pub fn convert(flags: &Flags, timer: &mut abstutil::Timer) -> RawMap {
     }
     if let Some(ref path) = flags.gtfs {
         timer.start("load GTFS");
-        map.bus_routes = gtfs::load(path).unwrap();
+        map.bus_routes = gtfs::load(path);
         timer.stop("load GTFS");
     }

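The dropped `.unwrap()` here pairs with the signature change to `gtfs::load` later in this commit: it now returns `Vec<Route>` directly and panics internally on a malformed feed instead of handing the caller a `Result`. A minimal sketch of that contract change, with hypothetical stand-in functions (not the real `gtfs::load`):

```rust
// Before: parse errors bubble up, and the caller decides what to do.
fn load_fallible(raw: &str) -> Result<Vec<f64>, std::num::ParseFloatError> {
    raw.split(',').map(|s| s.trim().parse()).collect()
}

// After: unwrap at the parse site. For a one-shot import tool, crashing
// loudly on a bad feed is simpler than threading errors upward.
fn load_infallible(raw: &str) -> Vec<f64> {
    raw.split(',').map(|s| s.trim().parse().unwrap()).collect()
}

fn main() {
    // The caller-side difference mirrors this hunk:
    let a = load_fallible("1.5, 2.5").unwrap();
    let b = load_infallible("1.5, 2.5");
    assert_eq!(a, b);
}
```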
@@ -1,8 +1,9 @@
 # Importing a new city into A/B Street

 My current priority is to make Seattle work very well, but if you want to try
-out A/B Street in another place, you can follow this guide. Please file a Github
-issue or email <dabreegster@gmail.com> if you hit any problems.
+out A/B Street in another place, you can follow this guide. Add to [this
+issue](https://github.com/dabreegster/abstreet/issues/27) if you find a new
+problem.

 First obtain a `.osm` with your desired area. You can use a tool like Osmosis to
 clip a specific area from a large file. Put the `.osm` in `data/input/osm`.
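The guide names Osmosis without showing an invocation. One hypothetical example of clipping a bounding box from a larger extract; the file names and coordinates are made up, and the box edges are given in degrees:

```sh
osmosis --read-xml file="washington-latest.osm" \
  --bounding-box top=47.76 left=-122.46 bottom=47.48 right=-122.22 \
  --write-xml file="data/input/osm/my_city.osm"
```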
@@ -6,9 +6,8 @@ edition = "2018"

 [dependencies]
 abstutil = { path = "../abstutil" }
-csv = "1.0.1"
-failure = "0.1.2"
 geom = { path = "../geom" }
 itertools = "0.8.0"
 serde = "1.0.98"
 serde_derive = "1.0.98"
+transitfeed = "0.3.0"
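The swap is one-for-one: `transitfeed` does its own CSV reading (replacing `csv`) and reports problems through `Result`s (replacing the `failure` error type in `load`'s old signature). A minimal standalone sketch of using it, relying only on the API surface visible in this diff (`GTFSIterator::from_path` and the `Stop` fields used below); the path is hypothetical:

```rust
use transitfeed::GTFSIterator;

fn main() {
    // Both from_path and each yielded record are Results; unwrapping both
    // mirrors the style this commit uses. The path is a made-up example.
    for rec in
        GTFSIterator::<_, transitfeed::Stop>::from_path("data/input/gtfs/stops.txt").unwrap()
    {
        let stop = rec.unwrap();
        println!("{}: ({}, {})", stop.stop_id, stop.stop_lon, stop.stop_lat);
    }
}
```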
@@ -1,11 +1,8 @@
 use abstutil::elapsed_seconds;
-use failure::Error;
 use geom::LonLat;
 use itertools::Itertools;
 use serde_derive::{Deserialize, Serialize};
 use std::collections::{BTreeSet, HashMap};
-use std::fs::File;
 use std::time::Instant;
+use transitfeed::GTFSIterator;

 #[derive(Debug, Serialize, Deserialize, PartialEq)]
 pub struct Route {
@@ -13,40 +10,50 @@ pub struct Route {
     pub stops: Vec<LonLat>,
 }

-pub fn load(dir_path: &str) -> Result<Vec<Route>, Error> {
+pub fn load(dir_path: &str) -> Vec<Route> {
     println!("Loading GTFS from {}", dir_path);
     let timer = Instant::now();

     let mut route_id_to_name: HashMap<String, String> = HashMap::new();
-    for rec in csv::Reader::from_reader(File::open(format!("{}/routes.txt", dir_path))?).records() {
-        let rec = rec?;
-        route_id_to_name.insert(rec[0].to_string(), rec[2].to_string());
+    for rec in GTFSIterator::<_, transitfeed::Route>::from_path(&format!("{}/routes.txt", dir_path))
+        .unwrap()
+    {
+        let rec = rec.unwrap();
+        route_id_to_name.insert(rec.route_id.clone(), rec.route_short_name.clone());
     }

     let mut stop_id_to_pt: HashMap<String, LonLat> = HashMap::new();
-    for rec in csv::Reader::from_reader(File::open(format!("{}/stops.txt", dir_path))?).records() {
-        let rec = rec?;
-        let lon: f64 = rec[5].parse()?;
-        let lat: f64 = rec[4].parse()?;
-        stop_id_to_pt.insert(rec[0].to_string(), LonLat::new(lon, lat));
+    for rec in
+        GTFSIterator::<_, transitfeed::Stop>::from_path(&format!("{}/stops.txt", dir_path)).unwrap()
+    {
+        let rec = rec.unwrap();
+        stop_id_to_pt.insert(rec.stop_id.clone(), LonLat::new(rec.stop_lon, rec.stop_lat));
     }

     let mut trip_id_to_route_id_and_direction: HashMap<String, (String, bool)> = HashMap::new();
-    for rec in csv::Reader::from_reader(File::open(format!("{}/trips.txt", dir_path))?).records() {
-        let rec = rec?;
-        trip_id_to_route_id_and_direction
-            .insert(rec[2].to_string(), (rec[0].to_string(), &rec[5] == "0"));
+    for rec in
+        GTFSIterator::<_, transitfeed::Trip>::from_path(&format!("{}/trips.txt", dir_path)).unwrap()
+    {
+        let rec = rec.unwrap();
+        trip_id_to_route_id_and_direction.insert(
+            rec.trip_id.clone(),
+            (
+                rec.route_id.clone(),
+                rec.direction_id.map(|d| d == "0").unwrap_or(true),
+            ),
+        );
     }

     // Each (directed) route has many trips. Ignore all but the first and assume the list of stops
     // is the same. Also assume that records with the same trip are contiguous and that
     // stop_sequence is monotonic.
     let mut directed_routes: HashMap<(String, bool), Vec<LonLat>> = HashMap::new();
-    let mut reader = csv::Reader::from_reader(File::open(format!("{}/stop_times.txt", dir_path))?);
-    for (key, group) in reader
-        .records()
-        .group_by(|rec| rec.as_ref().unwrap()[0].to_string())
-        .into_iter()
+    for (key, group) in
+        GTFSIterator::<_, transitfeed::StopTime>::from_path(&format!("{}/stop_times.txt", dir_path))
+            .unwrap()
+            .into_iter()
+            .map(|rec| rec.unwrap())
+            .group_by(|rec| rec.trip_id.clone())
+            .into_iter()
     {
         let (route_id, forwards) = trip_id_to_route_id_and_direction[&key].clone();
         if directed_routes.contains_key(&(route_id.clone(), forwards)) {
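The contiguity assumption in the comment above is load-bearing: `Itertools::group_by` only groups *consecutive* elements with equal keys. A self-contained illustration of that behavior (toy data, not real GTFS records):

```rust
use itertools::Itertools;

fn main() {
    // trip_id-like keys; note that "a" shows up again after "b".
    let records = vec![("a", 1), ("a", 2), ("b", 1), ("a", 3)];

    let grouped = records.into_iter().group_by(|(trip_id, _)| *trip_id);
    let groups: Vec<(&str, Vec<i32>)> = grouped
        .into_iter()
        .map(|(key, group)| (key, group.map(|(_, seq)| seq).collect()))
        .collect();

    // The non-contiguous "a" records form a *second* group: interleaved
    // trips in stop_times.txt would silently split a route's stop list.
    assert_eq!(
        groups,
        vec![("a", vec![1, 2]), ("b", vec![1]), ("a", vec![3])]
    );
}
```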
@@ -54,7 +61,7 @@ pub fn load(dir_path: &str) -> Result<Vec<Route>, Error> {
         }
         directed_routes.insert(
             (route_id, forwards),
-            group.map(|rec| stop_id_to_pt[&rec.unwrap()[3]]).collect(),
+            group.map(|rec| stop_id_to_pt[&rec.stop_id]).collect(),
         );
     }

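One behavior both versions of the changed line share: indexing `stop_id_to_pt` directly means a `stop_times.txt` row referencing a stop absent from `stops.txt` panics rather than being skipped. A self-contained illustration (names and coordinates hypothetical):

```rust
use std::collections::HashMap;

fn main() {
    let mut stop_id_to_pt: HashMap<String, (f64, f64)> = HashMap::new();
    stop_id_to_pt.insert("s1".to_string(), (-122.3, 47.6));

    // Index syntax is a hard lookup: present keys work...
    assert_eq!(stop_id_to_pt["s1"], (-122.3, 47.6));

    // ...but an unknown stop_id would panic, crashing the import:
    //   stop_id_to_pt["s2"];   // panics: key not found

    // .get() would be the non-panicking alternative, if skipping were preferred.
    assert!(stop_id_to_pt.get("s2").is_none());
}
```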
@@ -79,6 +86,5 @@ pub fn load(dir_path: &str) -> Result<Vec<Route>, Error> {
     }
     assert!(directed_routes.is_empty());

     println!("Loading GTFS took {}s", elapsed_seconds(timer));
-    Ok(results)
+    results
 }