Mirror of https://github.com/a-b-street/abstreet.git, synced 2024-11-26 22:40:03 +03:00
Tighten the GDAL flag guarding in the importer. This removes some
noise from 'cargo check' and is also a step towards running the importer in a Docker container without having to rebuild it there. #326
commit 4ac8c6e30b (parent c9be865966)
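The guarding pattern this commit adopts is easiest to see outside the diff. The sketch below is illustrative only, not importer code: `load_parcels` is a hypothetical stand-in, and only the `scenarios` feature name is taken from the project. The idea is to give the feature-gated function two definitions, the real one behind `#[cfg(feature = "scenarios")]` and a same-signature stub behind `#[cfg(not(feature = "scenarios"))]` that panics at runtime, so call sites compile unconditionally.

```rust
use std::collections::BTreeMap;

// Real implementation: compiled only when the GDAL-backed feature is enabled.
#[cfg(feature = "scenarios")]
fn load_parcels(path: &str) -> BTreeMap<usize, String> {
    // The expensive, GDAL-dependent work would live here.
    let mut parcels = BTreeMap::new();
    parcels.insert(0, format!("parcel read from {}", path));
    parcels
}

// Stub with the identical signature when the feature is off. The binary still
// builds; only this one code path fails, and it fails loudly at runtime.
#[cfg(not(feature = "scenarios"))]
fn load_parcels(_path: &str) -> BTreeMap<usize, String> {
    panic!("built without the scenarios feature (GDAL dependency)");
}

fn main() {
    // No #[cfg] at the call site: the surrounding logic type-checks the same
    // way whether or not the feature is compiled in.
    let parcels = load_parcels("zoning_parcels.bin");
    println!("loaded {} parcels", parcels.len());
}
```

With both definitions present, callers no longer need the `#[cfg(not(...))] { panic!(...); /* nonsense for the type-checker */ }` blocks that the old code below carries, which is where the `cargo check` noise came from, and a build without GDAL still produces a runnable importer, which matters for the Docker use case in #326.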
@@ -16,7 +16,6 @@ mod berlin;
 mod configuration;
 mod generic;
 mod seattle;
-#[cfg(feature = "scenarios")]
 mod soundcast;
 mod uk;
 mod utils;
@@ -155,28 +154,17 @@ impl Job {
                 .collect()
         };

-        let (maybe_popdat, maybe_huge_map, maybe_zoning_parcels) =
-            if self.scenario && self.city == CityName::seattle() {
-                #[cfg(feature = "scenarios")]
-                {
-                    let (popdat, huge_map) = seattle::ensure_popdat_exists(timer, config).await;
-                    // Just assume --raw has been called...
-                    let shapes: kml::ExtraShapes = abstio::read_binary(
-                        CityName::seattle().input_path("zoning_parcels.bin"),
-                        timer,
-                    );
-                    (Some(popdat), Some(huge_map), Some(shapes))
-                }
-
-                #[cfg(not(feature = "scenarios"))]
-                {
-                    panic!("Can't do --scenario without the scenarios feature compiled in");
-                    // Nonsense to make the type-checker work
-                    (Some(true), Some(true), Some(true))
-                }
-            } else {
-                (None, None, None)
-            };
+        let (maybe_popdat, maybe_huge_map, maybe_zoning_parcels) = if self.scenario
+            && self.city == CityName::seattle()
+        {
+            let (popdat, huge_map) = seattle::ensure_popdat_exists(timer, config).await;
+            // Just assume --raw has been called...
+            let shapes: kml::ExtraShapes =
+                abstio::read_binary(CityName::seattle().input_path("zoning_parcels.bin"), timer);
+            (Some(popdat), Some(huge_map), Some(shapes))
+        } else {
+            (None, None, None)
+        };

         for name in names {
             if self.osm_to_raw {
@@ -240,7 +228,6 @@ impl Job {
             };

             if self.scenario {
-                #[cfg(feature = "scenarios")]
                 if self.city == CityName::seattle() {
                     timer.start(format!("scenario for {}", name.describe()));
                     let scenario = soundcast::make_weekday_scenario(
@@ -153,7 +153,6 @@ pub async fn osm_to_raw(name: &str, timer: &mut Timer<'_>, config: &ImporterConf
 }

 /// Download and pre-process data needed to generate Seattle scenarios.
-#[cfg(feature = "scenarios")]
 pub async fn ensure_popdat_exists(
     timer: &mut Timer<'_>,
     config: &ImporterConfiguration,
@@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};

 use abstio::{CityName, FileWithProgress};
 use abstutil::{prettyprint_usize, Counter, Timer};
-use geom::{Distance, Duration, FindClosest, LonLat, Time};
+use geom::{Distance, Duration, LonLat, Time};
 use kml::{ExtraShape, ExtraShapes};
 use map_model::{osm, Map};
 use sim::{OrigPersonID, TripMode, TripPurpose};
@@ -113,10 +113,13 @@ fn import_trips(huge_map: &Map, timer: &mut Timer) -> Vec<OrigTrip> {

 // TODO Do we also need the zone ID, or is parcel ID globally unique?
 // Keyed by parcel ID
+#[cfg(feature = "scenarios")]
 fn import_parcels(
     huge_map: &Map,
     timer: &mut Timer,
 ) -> (HashMap<usize, Endpoint>, BTreeMap<usize, ExtraShape>) {
+    use geom::FindClosest;
+
     // TODO I really just want to do polygon containment with a quadtree. FindClosest only does
     // line-string stuff right now, which'll be weird for the last->first pt line and stuff.
     let mut closest_bldg: FindClosest<osm::OsmID> = FindClosest::new(huge_map.get_bounds());
@@ -220,6 +223,14 @@ fn import_parcels(
     (result, shapes)
 }

+#[cfg(not(feature = "scenarios"))]
+fn import_parcels(
+    _: &Map,
+    _: &mut Timer,
+) -> (HashMap<usize, Endpoint>, BTreeMap<usize, ExtraShape>) {
+    panic!("Can't import_parcels for popdat.bin without the scenarios feature (GDAL dependency)");
+}
+
 // From https://github.com/psrc/soundcast/wiki/Outputs#trip-file-_triptsv, opurp and dpurp
 fn get_purpose(code: &str) -> TripPurpose {
     match code {