use std::collections::HashMap;
use std::env::args;
use std::fs::File;
use std::io::{BufReader, Seek, SeekFrom};
use std::sync::mpsc::{sync_channel, Receiver};
use std::sync::Arc;
use std::thread;
use std::time::Instant;

use osm_pbf_iter::*;

pub struct PrimSource {
    req_rx: Receiver<Blob>,
}

impl PrimSource {
    /// Receive the next blob from the feeder, parse it, and run `f` over its
    /// primitives. Returns `None` once the request channel has been closed.
    pub fn recv_primitives<F: FnOnce(PrimitivesIterator) -> R, R>(&self, f: F) -> Option<R> {
        self.req_rx.recv()
            .ok()
            .map(|blob| {
                let data = blob.into_data();
                let primitive_block = PrimitiveBlock::parse(&data);
                f(primitive_block.primitives())
            })
    }
}

fn process_osm<F, R>(filename: &str, f: F) -> Vec<R>
where
    F: Fn(PrimSource) -> R + Send + Clone + 'static,
    R: Send + 'static,
{
    let cpus = num_cpus::get();
    let mut worker_results = Vec::with_capacity(cpus);

    // start workers, one per CPU
    let mut workers = Vec::with_capacity(cpus);
    for _ in 0..cpus {
        let (req_tx, req_rx) = sync_channel::<Blob>(2);
        let (res_tx, res_rx) = sync_channel::<R>(1);
        workers.push((req_tx, res_rx));

        let f = f.clone();
        thread::spawn(move || {
            let prim_src = PrimSource { req_rx };
            let result = f(prim_src);
            res_tx.send(result).unwrap();
        });
    }

    // open file
    println!("Open {}", filename);
    let f = File::open(filename).unwrap();
    let mut reader = BlobReader::new(BufReader::new(f));
    let start = Instant::now();

    // feed blobs to the workers round-robin
    let mut w = 0;
    for blob in &mut reader {
        let req_tx = &workers[w].0;
        w = (w + 1) % cpus;

        req_tx.send(blob).unwrap();
    }

    // receive results; dropping req_tx closes the request channel,
    // which makes the worker's recv_primitives() return None
    for (req_tx, res_rx) in workers.into_iter() {
        drop(req_tx);
        let worker_res = res_rx.recv().unwrap();
        worker_results.push(worker_res);
    }

    // stats
    let stop = Instant::now();
    let duration = stop.duration_since(start);
    let duration = duration.as_secs() as f64 + duration.subsec_nanos() as f64 / 1e9;
    let mut f = reader.into_inner();
    if let Ok(pos) = f.seek(SeekFrom::Current(0)) {
        let rate = pos as f64 / 1024f64 / 1024f64 / duration;
        println!("Processed {} MB in {:.2} seconds ({:.2} MB/s)",
                 pos / 1024 / 1024, duration, rate);
    }

    worker_results
}

fn main() {
    let mut node_coords: HashMap<i64, (f64, f64)> = HashMap::new();

    // phase 1: nodes - build the node id -> (lon, lat) lookup table
    for arg in args().skip(1) {
        let worker_res = process_osm(&arg, move |prim_src| {
            let mut res = HashMap::new();
            while prim_src.recv_primitives(|iter| {
                for primitive in iter {
                    match primitive {
                        Primitive::Node(node) => {
                            res.insert(node.id as i64, (node.lon, node.lat));
                        }
                        Primitive::Way(_) => {}
                        Primitive::Relation(_) => {}
                    }
                }
                true
            }).unwrap_or(false) {}
            res
        });
        for mut res in worker_res {
            if node_coords.is_empty() {
                node_coords = res;
            } else {
                // merge
                for (id, coords) in res.drain() {
                    node_coords.insert(id, coords);
                }
            }
        }
    }
    println!("{} nodes", node_coords.len());
    let node_coords = Arc::new(node_coords);

    let mut way_coords: HashMap<i64, Vec<(f64, f64)>> = HashMap::new();

    // phase 2: ways - resolve node refs to coordinates and insert into the DB
    for arg in args().skip(1) {
        let node_coords = node_coords.clone();
        let worker_res = process_osm(&arg, move |prim_src| {
            const DB_URL: &str = "host=10.233.1.2 dbname=treeadvisor user=treeadvisor password=123";
            // one connection per worker thread
            let mut db = postgres::Client::connect(DB_URL, postgres::NoTls)
                .expect("DB");

            let mut res = HashMap::new();
            let mut running = true;
            while running {
                running = prim_src.recv_primitives(|iter| {
                    // one transaction per primitive block
                    let mut tx = db.transaction().unwrap();
                    for primitive in iter {
                        match primitive {
                            Primitive::Node(_) => {}
                            Primitive::Way(way) => {
                                let tags: serde_json::Map<String, serde_json::Value> = way.tags()
                                    .map(|(k, v)| (k.to_string(), serde_json::Value::String(v.to_string())))
                                    .collect();
                                let points = way.refs()
                                    .filter_map(|id| node_coords.get(&id))
                                    .cloned()
                                    .collect::<Vec<_>>();
                                tx.execute(
                                    "INSERT INTO osm_ways (geo, id, attrs) VALUES ($1, $2, $3)",
                                    &[&geo::LineString::from(points.clone()),
                                      &(way.id as i64),
                                      &serde_json::Value::Object(tags)]
                                ).unwrap();
                                res.insert(way.id as i64, points);
                            }
                            Primitive::Relation(_) => {}
                        }
                    }
                    tx.commit().unwrap();
                    true
                }).unwrap_or(false);
            }
            res
        });
        for mut res in worker_res {
            if way_coords.is_empty() {
                way_coords = res;
            } else {
                // merge
                for (id, coords) in res.drain() {
                    way_coords.insert(id, coords);
                }
            }
        }
    }

    let way_coords = Arc::new(way_coords);
    // phase 3: rels (TODO)
}
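// The osm_ways table is created outside this program. A plausible schema
// matching the INSERT above (an assumption, not taken from this file) would
// be the following; the postgres crate serializes geo::LineString as
// PostgreSQL's built-in PATH geometric type:
//
//   CREATE TABLE osm_ways (
//       id    BIGINT,
//       geo   PATH,
//       attrs JSONB
//   );
//
// Passing geo::LineString and serde_json::Value as query parameters also
// assumes the postgres crate is built with the matching feature flags
// (with-geo-types-* and with-serde_json-*; exact names vary by version).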