When calculating traffic volumes, deduplicate path requests first.

Around 2x fewer paths to calculate.

Even though the deduplication throws away some info, the net effect for
measuring traffic volumes is practically equivalent, so it's a
worthwhile optimization. Used the new comparison UI to verify that!
Dustin Carlino 2022-01-27 14:21:19 +00:00
parent edfd320908
commit e21c57b70a
3 changed files with 47 additions and 17 deletions
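
The reasoning in the commit message can be sanity-checked with a small standalone sketch: group identical trips, route each group once, and weight whatever is being measured by the group size. The total comes out the same as routing every trip individually. (The `Trip` tuple and uniform per-trip volume here are illustrative only; the real change below keys on `(PathConstraints, DirectedRoadID, DirectedRoadID)`.)

use std::collections::HashMap;

// Toy stand-in for a path request: mode plus origin/destination road IDs.
type Trip = (&'static str, u32, u32);

fn main() {
    let trips: Vec<Trip> = vec![
        ("car", 1, 7),
        ("car", 1, 7),
        ("bike", 2, 5),
        ("car", 1, 7),
    ];

    // Group identical trips and remember how many times each one occurs.
    let mut grouped: HashMap<Trip, usize> = HashMap::new();
    for trip in &trips {
        *grouped.entry(*trip).or_insert(0) += 1;
    }

    // Routing once per group and multiplying by the group size accounts for
    // exactly as many trips as routing every request individually.
    let total_trips: usize = trips.len();
    let weighted_trips: usize = grouped.values().sum();
    assert_eq!(total_trips, weighted_trips);
    println!(
        "{} trips, only {} unique routes to calculate",
        total_trips,
        grouped.len()
    );
}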

View File

@@ -27,7 +27,8 @@ pub struct Impact {
     pub filters: Filters,
 
     all_trips: Vec<PathRequest>,
-    filtered_trips: Vec<PathRequest>,
+    // A subset of all_trips, and the number of times somebody takes the same trip
+    filtered_trips: Vec<(PathRequest, usize)>,
 
     pub compare_counts: CompareCounts,
     pub change_key: usize,
@@ -91,12 +92,14 @@ impl Impact {
             .iter()
             .map(|m| m.to_constraints())
             .collect();
-        self.filtered_trips = self
-            .all_trips
+        self.filtered_trips = PathRequest::deduplicate(
+            map,
+            self.all_trips
             .iter()
             .filter(|req| constraints.contains(&req.constraints))
             .cloned()
-            .collect();
+            .collect(),
+        );
 
         let counts_a = count_throughput(
             map,
@@ -154,7 +157,7 @@ impl Impact {
 fn count_throughput(
     map: &Map,
     description: String,
-    requests: &[PathRequest],
+    requests: &[(PathRequest, usize)],
     params: RoutingParams,
     cache_custom: PathfinderCaching,
     timer: &mut Timer,
@@ -179,16 +182,17 @@ fn count_throughput(
     // right now won't let that work. Stick to single-threaded for now.
     timer.start_iter("calculate routes", requests.len());
-    for req in requests {
+    for (req, count) in requests {
         timer.next();
         if let Ok(path) = map.pathfind_v2_with_params(req.clone(), &params, cache_custom) {
+            let count = *count;
             for step in path.get_steps() {
                 match step {
                     PathStepV2::Along(dr) | PathStepV2::Contraflow(dr) => {
-                        road_counts.inc(dr.road);
+                        road_counts.add(dr.road, count);
                     }
                     PathStepV2::Movement(m) | PathStepV2::ContraflowMovement(m) => {
-                        intersection_counts.inc(m.parent);
+                        intersection_counts.add(m.parent, count);
                     }
                 }
             }

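For context on the `inc` vs `add` change above: the counters just gain a weight, where incrementing by one is the special case of adding a count of one. A minimal sketch of that semantics follows (the real code uses the project's own counter type; this toy version only shows the relationship).

use std::collections::HashMap;

// Toy counter: inc(key) is just the count == 1 special case of add(key, count).
struct Counter<K: std::hash::Hash + Eq>(HashMap<K, usize>);

impl<K: std::hash::Hash + Eq> Counter<K> {
    fn new() -> Self {
        Counter(HashMap::new())
    }

    // Old behavior: one routed request bumps the road or intersection by 1.
    fn inc(&mut self, key: K) {
        self.add(key, 1);
    }

    // New behavior: one routed request stands in for `count` identical trips.
    fn add(&mut self, key: K, count: usize) {
        *self.0.entry(key).or_insert(0) += count;
    }
}

fn main() {
    let mut road_counts = Counter::new();
    road_counts.inc("bridge st");
    road_counts.add("bridge st", 3);
    assert_eq!(road_counts.0["bridge st"], 4);
}
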
View File

@@ -12,11 +12,6 @@ use widgetry::{
 use crate::tools::{cmp_count, ColorNetwork, DivergingScale};
 use crate::AppLike;
 
-// TODO Document all of this!
-// 4) See if we can dedupe requests in the impact prediction -- using this tool to validate
-// 5) Download the sensor data and get it in this format (and maybe filter simulated data to only
-//    match roads we have)
 #[derive(Serialize, Deserialize)]
 pub struct Counts {
     pub map: MapName,

View File

@@ -1,9 +1,10 @@
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, VecDeque};
 use std::fmt;
 
 use anyhow::Result;
 use serde::{Deserialize, Serialize};
 
+use abstutil::prettyprint_usize;
 use geom::{Distance, Duration, PolyLine, Speed, EPSILON_DIST};
 
 use crate::{
@@ -679,6 +680,36 @@ impl PathRequest {
             alt_start: None,
         })
     }
+
+    /// Group similar requests together, returning the number of matches. This can be used to
+    /// calculate fewer paths and multiply whatever's being measured by the count.
+    ///
+    /// Note this throws away detail. It only groups by the mode and from/to parent. Exact position
+    /// and alternate starting points are lost.
+    pub fn deduplicate(map: &Map, requests: Vec<PathRequest>) -> Vec<(PathRequest, usize)> {
+        let count_before = requests.len();
+        let mut common: BTreeMap<
+            (PathConstraints, DirectedRoadID, DirectedRoadID),
+            (PathRequest, usize),
+        > = BTreeMap::new();
+        for req in requests {
+            let key = (
+                req.constraints,
+                map.get_l(req.start.lane()).get_directed_parent(),
+                map.get_l(req.end.lane()).get_directed_parent(),
+            );
+            let pair = common.entry(key).or_insert_with(|| (req, 0));
+            pair.1 += 1;
+        }
+        if false {
+            info!(
+                "{} requests deduplicated down to {}",
+                prettyprint_usize(count_before),
+                prettyprint_usize(common.len())
+            );
+        }
+        common.into_values().collect()
+    }
 }
 
 fn validate_continuity(map: &Map, steps: &[PathStep]) {
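
One caveat worth spelling out, matching the doc comment on `deduplicate`: the grouping key only keeps the mode and the from/to roads, so two requests starting at different positions along the same road collapse into a single group, represented by whichever request was seen first. A toy illustration of that lossiness (the fields are simplified stand-ins, not the real `PathRequest`):

use std::collections::BTreeMap;

// Simplified stand-in for a path request, just to show what the grouping
// keeps and what it drops.
#[derive(Clone)]
struct Request {
    mode: &'static str,
    from_road: u32,
    // Ignored by the grouping key: the exact position along the road is lost.
    from_dist_along: f64,
    to_road: u32,
}

fn main() {
    let requests = vec![
        Request { mode: "car", from_road: 1, from_dist_along: 10.0, to_road: 9 },
        Request { mode: "car", from_road: 1, from_dist_along: 250.0, to_road: 9 },
    ];

    let mut common: BTreeMap<(&str, u32, u32), (Request, usize)> = BTreeMap::new();
    for req in requests {
        let key = (req.mode, req.from_road, req.to_road);
        let pair = common.entry(key).or_insert_with(|| (req.clone(), 0));
        pair.1 += 1;
    }

    // Both trips collapse into one group, represented by the first request.
    assert_eq!(common.len(), 1);
    assert_eq!(common.values().next().unwrap().1, 2);
}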