moving parcel grouping into convert_osm, which is run with --release anyway

This commit is contained in:
Dustin Carlino 2018-08-06 12:58:49 -07:00
parent e31061860b
commit 82902199a2
9 changed files with 107 additions and 86 deletions

View File

@ -7,6 +7,7 @@ authors = ["Dustin Carlino <dabreegster@gmail.com>"]
abstutil = { path = "../abstutil" }
byteorder = "1.2.1"
dimensioned = { git = "https://github.com/paholg/dimensioned", rev = "0e1076ebfa5128d1ee544bdc9754c948987b6fe3", features = ["serde"] }
geo = "0.9.1"
geom = { path = "../geom" }
ordered-float = "0.5.0"
osm-xml = "0.5.1"

View File

@ -0,0 +1,97 @@
use abstutil::MultiMap;
use geo;
use geom::LonLat;
use map_model::raw_data;
use std::collections::BTreeSet;
// Slight cheat
type ParcelIdx = usize;
/// Groups parcels into blocks: parcels whose polygons (transitively) intersect
/// share a block ID, which is written into each parcel's `block` field.
/// Runs in O(n^2) over parcels; meant to be run from convert_osm in --release.
pub fn group_parcels(parcels: &mut Vec<raw_data::Parcel>) {
    // First compute which parcels intersect
    let mut adjacency: MultiMap<ParcelIdx, ParcelIdx> = MultiMap::new();
    // TODO could use quadtree to prune
    println!(
        "Precomputing adjacency between {} parcels...",
        parcels.len()
    );
    let mut adj_counter = 0;
    for (p1_idx, p1) in parcels.iter().enumerate() {
        for (p2_idx, p2) in parcels.iter().enumerate() {
            // p1_idx < p2_idx visits each unordered pair exactly once.
            if p1_idx < p2_idx && polygons_intersect(&p1.points, &p2.points) {
                // TODO could do something more clever later to avoid double memory
                adjacency.insert(p1_idx, p2_idx);
                adjacency.insert(p2_idx, p1_idx);
                adj_counter += 1;
            }
        }
    }
    println!(
        "{} adjacencies, now doing floodfilling to group them",
        adj_counter
    );

    // Returns every parcel reachable from `from` through the adjacency relation.
    // Union-find might also be good inspiration.
    fn floodfill(from: ParcelIdx, adj: &MultiMap<ParcelIdx, ParcelIdx>) -> BTreeSet<ParcelIdx> {
        let mut visited: BTreeSet<ParcelIdx> = BTreeSet::new();
        let mut queue: Vec<ParcelIdx> = vec![from];
        // while-let is the idiomatic form of !is_empty() + pop().unwrap().
        while let Some(current) = queue.pop() {
            if visited.contains(&current) {
                continue;
            }
            visited.insert(current);
            for next in adj.get(current).iter() {
                queue.push(*next);
            }
        }
        visited
    }

    // vec![None; n] replaces the manual push loop.
    let mut block_per_parcel: Vec<Option<usize>> = vec![None; parcels.len()];
    let mut block_counter = 0;
    for base_idx in 0..parcels.len() {
        // A previous iteration might have filled it out
        if block_per_parcel[base_idx].is_some() {
            continue;
        }
        let new_block = Some(block_counter);
        block_counter += 1;
        for idx in floodfill(base_idx, &adjacency).iter() {
            // Flooding from an unassigned parcel can only reach unassigned
            // parcels, since blocks are assigned one whole component at a time.
            assert!(block_per_parcel[*idx].is_none());
            block_per_parcel[*idx] = new_block;
        }
    }
    println!(
        "{} parcels grouped into {} blocks",
        parcels.len(),
        block_counter
    );

    for (idx, block) in block_per_parcel.iter().enumerate() {
        // Every parcel belongs to exactly one flood-filled component by now.
        parcels[idx].block = block.unwrap();
    }
}
// True when the two point rings, treated as simple lon/lat polygons with no
// holes, overlap according to geo's Intersects trait.
fn polygons_intersect(pts1: &Vec<LonLat>, pts2: &Vec<LonLat>) -> bool {
    use geo::prelude::Intersects;

    // Build a hole-free geo::Polygon from a ring of LonLat points.
    let make_polygon = |pts: &Vec<LonLat>| {
        geo::Polygon::new(
            pts.iter()
                .map(|pt| geo::Point::new(pt.longitude, pt.latitude))
                .collect(),
            Vec::new(),
        )
    };

    make_polygon(pts1).intersects(&make_polygon(pts2))
}

View File

@ -1,6 +1,7 @@
extern crate abstutil;
extern crate byteorder;
extern crate dimensioned;
extern crate geo;
extern crate geom;
extern crate map_model;
extern crate ordered_float;
@ -9,6 +10,7 @@ extern crate shp;
#[macro_use]
extern crate structopt;
mod group_parcels;
mod osm;
mod remove_disconnected;
mod split_ways;
@ -69,6 +71,7 @@ pub fn convert(flags: &Flags) -> raw_data::Map {
map.parcels.push(p);
}
}
group_parcels::group_parcels(&mut map.parcels);
for coord in
&traffic_signals::extract(&flags.traffic_signals).expect("loading traffic signals failed")

View File

@ -36,7 +36,10 @@ pub fn load(
// interpret parsing failures appropriately though...
if text.contains(" ") {
let mut ok = true;
let mut parcel = map_model::raw_data::Parcel { points: Vec::new() };
let mut parcel = map_model::raw_data::Parcel {
points: Vec::new(),
block: 0,
};
for pt in text.split(" ") {
if let Some((lon, lat)) = parse_pt(pt) {
if b.contains(lon, lat) {

View File

@ -1,7 +1,6 @@
// Copyright 2018 Google LLC, licensed under http://www.apache.org/licenses/LICENSE-2.0
use aabb_quadtree::geom::{Point, Rect};
use geo;
use geom::{Angle, Bounds, PolyLine, Pt2D};
use graphics::math::Vec2d;
use std::f64;
@ -115,19 +114,3 @@ pub fn regular_polygon(center: Pt2D, sides: usize, length: f64) -> Vec<Pt2D> {
pts.push(first_pt);
pts
}
// True when the two point rings, treated as simple hole-free polygons in map
// space, overlap according to geo's Intersects trait.
pub fn polygons_intersect(pts1: &Vec<Pt2D>, pts2: &Vec<Pt2D>) -> bool {
    use geo::prelude::Intersects;

    // Build a hole-free geo::Polygon from a ring of Pt2D points.
    let make_polygon = |pts: &Vec<Pt2D>| {
        geo::Polygon::new(
            pts.iter()
                .map(|pt| geo::Point::new(pt.x(), pt.y()))
                .collect(),
            Vec::new(),
        )
    };

    make_polygon(pts1).intersects(&make_polygon(pts2))
}

View File

@ -1,6 +1,5 @@
mod buildings;
mod lanes;
mod parcels;
mod trim_lines;
mod turns;
@ -8,4 +7,3 @@ pub(crate) use self::buildings::make_building;
pub(crate) use self::lanes::get_lane_specs;
pub(crate) use self::trim_lines::trim_lines;
pub(crate) use self::turns::make_all_turns;
pub(crate) use self::parcels::group_parcels;

View File

@ -1,64 +0,0 @@
use {Parcel, ParcelID, geometry};
use std::collections::BTreeSet;
use abstutil::MultiMap;
/// Groups parcels into blocks: parcels whose polygons (transitively) intersect
/// share a block ID, which is written into each parcel's `block` field.
/// Runs in O(n^2) over parcels.
pub fn group_parcels(parcels: &mut Vec<Parcel>) {
    // First compute which parcels intersect
    let mut adjacency: MultiMap<ParcelID, ParcelID> = MultiMap::new();
    // TODO could use quadtree to prune
    println!("Precomputing adjacency between {} parcels...", parcels.len());
    let mut adj_counter = 0;
    for p1 in parcels.iter() {
        for p2 in parcels.iter() {
            // p1.id < p2.id visits each unordered pair exactly once.
            if p1.id < p2.id && geometry::polygons_intersect(&p1.points, &p2.points) {
                // TODO could do something more clever later to avoid double memory
                adjacency.insert(p1.id, p2.id);
                adjacency.insert(p2.id, p1.id);
                adj_counter += 1;
            }
        }
    }
    println!("{} adjacencies, now doing floodfilling to group them", adj_counter);

    // Returns every parcel reachable from `from` through the adjacency relation.
    // Union-find might also be good inspiration.
    fn floodfill(from: ParcelID, adj: &MultiMap<ParcelID, ParcelID>) -> BTreeSet<ParcelID> {
        let mut visited: BTreeSet<ParcelID> = BTreeSet::new();
        let mut queue: Vec<ParcelID> = vec![from];
        // while-let is the idiomatic form of !is_empty() + pop().unwrap().
        while let Some(current) = queue.pop() {
            if visited.contains(&current) {
                continue;
            }
            visited.insert(current);
            for next in adj.get(current).iter() {
                queue.push(*next);
            }
        }
        visited
    }

    // vec![None; n] replaces the manual push loop.
    let mut block_per_parcel: Vec<Option<usize>> = vec![None; parcels.len()];
    let mut block_counter = 0;
    for base_p in parcels.iter() {
        // A previous iteration might have filled it out
        if block_per_parcel[base_p.id.0].is_some() {
            continue;
        }
        let new_block = Some(block_counter);
        block_counter += 1;
        for p in floodfill(base_p.id, &adjacency).iter() {
            // Flooding from an unassigned parcel can only reach unassigned
            // parcels, since blocks are assigned one whole component at a time.
            assert!(block_per_parcel[p.0].is_none());
            block_per_parcel[p.0] = new_block;
        }
    }
    println!("{} parcels grouped into {} blocks", parcels.len(), block_counter);

    for (idx, block) in block_per_parcel.iter().enumerate() {
        // Every parcel belongs to exactly one flood-filled component by now.
        parcels[idx].block = block.unwrap();
    }
}

View File

@ -161,10 +161,9 @@ impl Map {
.iter()
.map(|coord| Pt2D::from_gps(coord, &bounds))
.collect(),
block: 0,
block: p.block,
});
}
make::group_parcels(&mut m.parcels);
Ok(m)
}

View File

@ -83,4 +83,5 @@ pub struct Parcel {
// last point never the first?
pub points: Vec<LonLat>,
// TODO decide what metadata from the shapefile is useful
pub block: usize,
}