diff --git a/src/parser/utils.rs b/src/parser/utils.rs
index 442a7d3..451614a 100644
--- a/src/parser/utils.rs
+++ b/src/parser/utils.rs
@@ -18,6 +18,7 @@ impl<O, P: Parser<O>> TokenParser<O> for P
 
 pub trait OrDefaultParser<O: Default>: Parser<O> + Sized {
+    #[allow(clippy::type_complexity)]
     fn or_default(self) -> Map<OrNot<Self>, fn(Option<O>) -> O, Option<O>> {
         self.or_not().map(Option::unwrap_or_default)
     }
 
diff --git a/src/scheduling/astar.rs b/src/scheduling/astar.rs
index 5dbfdaf..2e7656a 100644
--- a/src/scheduling/astar.rs
+++ b/src/scheduling/astar.rs
@@ -131,21 +131,10 @@ pub struct Explored<S> {
 type ExploredMap<S> = HashMap<u64, Explored<S>>;
 type ScheduleQueue<S> = BinaryHeap<ScheduleNode<S>>;
 
-struct FastHasher(ahash::AHasher);
-
-impl FastHasher {
-    fn new() -> Self {
-        Self(ahash::AHasher::default())
-    }
-
-    fn hash_one_off<T: Hash>(&mut self, value: &T) -> u64 {
-        let buf = &mut self.0 as *mut ahash::AHasher as *mut u64;
-        unsafe { *buf = 0 };
-
-        value.hash(&mut self.0);
-
-        self.0.finish()
-    }
+fn hash_one_off<T: Hash>(value: &T) -> u64 {
+    let mut hasher = ahash::AHasher::default();
+    value.hash(&mut hasher);
+    hasher.finish()
 }
 
 #[derive(Clone, Debug, Default)]
@@ -191,12 +180,10 @@ pub trait AStarScheduler: Sized + Sync + Send {
             at_end: start.all_done(),
         });
 
-        let mut hasher = FastHasher::new();
-
         // 1. Pop top of priority queue (node closest to end according to actual cost + estimated
         // remaining distance).
         while let Some(mut node) = queue.pop() {
-            let came_from = hasher.hash_one_off(&node.state);
+            let came_from = hash_one_off(&node.state);
             // 2a. If the shortest node is the end we know we found our solution, accumulate the
             // steps and return.
             if node.at_end {
@@ -235,13 +222,13 @@ pub trait AStarScheduler: Sized + Sync + Send {
             }
             let new_cost = node.cost + steps.iter().map(|step| step.cost()).sum::<u32>();
             tracker.total_explored += 1;
-            let new_state_hash = hasher.hash_one_off(&new_state);
+            let new_state_hash = hash_one_off(&new_state);
 
-            let new_cost_better = match explored.get(&new_state_hash) {
+            match explored.get(&new_state_hash) {
                 Some(e) => new_cost < e.cost,
                 None => true,
-            };
-            if new_cost_better {
+            }
+            .then(|| {
                 let out = explored.insert(
                     new_state_hash,
                     Explored {
@@ -252,14 +239,13 @@ pub trait AStarScheduler: Sized + Sync + Send {
                 );
                 tracker.total_collisions += if out.is_some() { 1 } else { 0 };
                 let score = new_cost + self.estimate_remaining_cost(info, &new_state, new_cost);
-                return Some(ScheduleNode {
+                ScheduleNode {
                     state: new_state,
                     cost: new_cost,
                     score,
                     at_end,
-                });
-            }
-            None
+                }
+            })
         }));
     }
 
diff --git a/src/transformer/ir_gen.rs b/src/transformer/ir_gen.rs
index 6ba2d5d..3a95198 100644
--- a/src/transformer/ir_gen.rs
+++ b/src/transformer/ir_gen.rs
@@ -126,18 +126,18 @@ impl SemanticContext {
 
     /// Returns the IDs of the nodes that the newly inserted node is now dependent on (non-operand
     /// semantic dependency).
-    pub fn record_write(&mut self, dependency: &String, id: CompNodeId) {
+    pub fn record_write(&mut self, dependency: &str, id: CompNodeId) {
         let mut pre_deps = self
             .last_reads
-            .insert(dependency.clone(), Vec::new())
+            .insert(dependency.to_string(), Vec::new())
             .unwrap_or_default();
 
-        pre_deps.extend(self.last_write.insert(dependency.clone(), id));
+        pre_deps.extend(self.last_write.insert(dependency.to_string(), id));
         self.nodes_sources[id].0.post.extend(pre_deps);
     }
 }
 
-fn unspan<T: Clone>(spanned: &Vec<Spanned<T>>) -> Vec<T> {
+fn unspan<T: Clone>(spanned: &[Spanned<T>]) -> Vec<T> {
     spanned.iter().map(Spanned::unwrap_ref).cloned().collect()
 }
 
@@ -235,7 +235,7 @@ fn set_blocked_count(input_ids: &[CompNodeId], output_ids: &[CompNodeId], nodes:
     let total = nodes.len();
 
     let mut blocked_by = vec![0u32; total];
-    let mut stack_count = vec![0u32; total];
+    let mut stack_counts = vec![0u32; total];
 
     for node in nodes.iter() {
         for post_id in node.post.iter() {
@@ -244,23 +244,22 @@ fn set_blocked_count(input_ids: &[CompNodeId], output_ids: &[CompNodeId], nodes:
         for dep_id in node.operands.iter() {
            blocked_by[*dep_id] += 1;
             // Blocked once as an argument.
-            stack_count[*dep_id] += 1;
+            stack_counts[*dep_id] += 1;
         }
     }
 
     for output_id in output_ids.iter() {
-        stack_count[*output_id] += 1;
+        stack_counts[*output_id] += 1;
     }
 
     for id in 0..total {
-        let required_dedups = stack_count[id].max(1) - 1;
+        let required_dedups = stack_counts[id].max(1) - 1;
         *nodes[id].blocked_by.as_mut().unwrap() += required_dedups + blocked_by[id];
     }
 
-    for id in 0..total {
-        if nodes[id].blocked_by.unwrap() == 0 && input_ids.contains(&id) && output_ids.contains(&id)
-        {
-            nodes[id].blocked_by = None;
+    for (id, node) in nodes.iter_mut().enumerate() {
+        if node.blocked_by.unwrap() == 0 && input_ids.contains(&id) && output_ids.contains(&id) {
+            node.blocked_by = None;
         }
     }
 }
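Reviewer notes (commentary, not part of the patch):

* The removed FastHasher reset its state by writing a zero through
  `&mut ahash::AHasher as *mut u64`, which only clears the first eight
  bytes of the hasher and depends on undocumented struct layout; the
  replacement builds a fresh hasher per call, which is the sound way to
  get a one-off hash. On Rust 1.71+ the same thing can be written with
  the standard `BuildHasher::hash_one`. A minimal sketch, assuming the
  crate's existing `ahash` dependency; `with_seeds` pins the keys so
  repeated calls agree, which matters here because the result is used
  as a map key:

      use std::hash::{BuildHasher, Hash};

      fn hash_one_off<T: Hash>(value: &T) -> u64 {
          // Fixed seeds: equal values must hash equally across calls,
          // matching the deterministic AHasher::default() in the patch.
          ahash::RandomState::with_seeds(1, 2, 3, 4).hash_one(value)
      }

* The `match ... { ... }.then(|| { ... })` chain uses `bool::then`,
  which maps `true` to `Some(closure())` and `false` to `None`, so the
  old `return Some(...)` plus trailing `None` shape collapses into one
  expression. A self-contained demo of the semantics:

      fn main() {
          // The closure is evaluated only when the bool is true.
          assert_eq!((2 < 3).then(|| "keep"), Some("keep"));
          assert_eq!((3 < 2).then(|| "keep"), None);
      }

  Since the closure here also performs the `explored.insert`
  bookkeeping, a plain `if` arguably reads more clearly; the behavior
  is the same either way.

* Taking `dependency: &str` instead of `&String` lets call sites pass
  both owned strings and literals. A sketch with a hypothetical free
  function standing in for the method:

      fn record_write(dependency: &str) {
          let _ = dependency; // stand-in for the real bookkeeping
      }

      fn main() {
          let key = String::from("mem");
          record_write(&key);  // &String auto-derefs to &str
          record_write("mem"); // literals now work without an allocation
      }

* In `set_blocked_count`, `stack_counts[id].max(1) - 1` is an
  underflow-safe decrement on `u32`; `stack_counts[id].saturating_sub(1)`
  would express the same intent more directly.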