This commit is contained in:
Acvaxoort 2023-12-27 18:02:21 +01:00
parent d6ae1b03b1
commit 00bcb17d76
4 changed files with 1497 additions and 0 deletions

75
day25/Cargo.lock generated Normal file
View File

@ -0,0 +1,75 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "day25"
version = "0.1.0"
dependencies = [
"rand",
]
[[package]]
name = "getrandom"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "libc"
version = "0.2.151"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4"
[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"

9
day25/Cargo.toml Normal file
View File

@ -0,0 +1,9 @@
[package]
name = "day25"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
rand = "0.8.5"

1262
day25/input.txt Normal file

File diff suppressed because it is too large Load Diff

151
day25/src/main.rs Normal file
View File

@ -0,0 +1,151 @@
use std::cell::RefCell;
use std::collections::{BTreeSet, HashMap};
use std::fs::read_to_string;
use std::mem;
use std::time::Instant;
// Returns the weight of minimum cut
// and a vector containing node indices of one of the resulting graphs after the cut
// Returns the weight of minimum cut
// and a vector containing node indices of one of the resulting graphs after the cut
//
// Implements the Stoer-Wagner global minimum cut algorithm. The input is an
// undirected graph as adjacency lists (`graph[u]` lists u's neighbours);
// every initial edge gets weight 1, and parallel edges simply contribute
// their combined weight after merging.
fn stoer_wagner_cut(graph: &Vec<Vec<u16>>) -> (u16, Vec<u16>) {
    // Fewer than two nodes: there is nothing to cut.
    if graph.len() < 2 {
        return (0, vec![]);
    }
    // the original graph with added edge weight values
    // (each adjacency entry becomes (neighbour index, edge weight))
    let mut modified_graph: Vec<Vec<(u16, u16)>> = graph.iter()
        .map(|neighbours| neighbours.iter()
            .map(|&elem| (elem, 1u16))
            .collect::<Vec<_>>()
        ).collect::<Vec<_>>();
    // The node indices that haven't been removed through merging yet
    let mut remaining_nodes: Vec<u16> = (0..modified_graph.len() as u16).collect();
    // Min cut weight so far and node that was cut off during it
    let mut best_cut_weight: u16 = u16::MAX;
    let mut best_cut_node: u16 = 0;
    // priority queue for finding the most tightly connected node, the highest value is next
    // the keys are graph_size * weight + index, so there's no collision for the same weights
    // a fibonacci heap could be better but not strictly needed and I'd rather minimize imports
    let graph_size = modified_graph.len() as u32;
    let mut search_candidates_queue: BTreeSet<u32> = BTreeSet::new();
    // keeping track of priority queue indices so it can be removed and re added
    // (u32::MAX marks a node that is not currently queued)
    let mut search_candidates_keys: Vec<u32> = vec![u32::MAX; remaining_nodes.len()];
    // keeping track of what nodes were merged with what
    let mut merge_tree: Vec<Vec<u16>> = vec![vec![]; modified_graph.len()];
    // Each iteration runs one "minimum cut phase" and contracts one edge,
    // so this loop executes |V| - 1 times.
    while remaining_nodes.len() > 1 {
        // the nodes that are cut second to last and last, weight of their cut
        let mut s = remaining_nodes[0];
        let mut t = s;
        let mut cut_of_the_phase = 0;
        // min cut phase - preparing the queue
        // seed every other node with its edge weight towards the start node
        for &idx in &remaining_nodes[1..remaining_nodes.len()] {
            let weight = match modified_graph[remaining_nodes[0] as usize].iter().find(
                |&(edge_to, _)| *edge_to == idx) {
                None => 0,
                Some(&(_, edge_weight)) => edge_weight
            };
            let key = graph_size * weight as u32 + idx as u32;
            search_candidates_keys[idx as usize] = key;
            search_candidates_queue.insert(key);
        }
        // min cut phase
        // Repeatedly extract the most tightly connected node; the final two
        // extracted become s and t, and t's connectivity at extraction time
        // is the weight of the cut that separates t from everything else.
        while !search_candidates_queue.is_empty() {
            let max_key = search_candidates_queue.pop_last().unwrap();
            // Decode node index and accumulated weight from the packed key.
            let idx = (max_key % graph_size) as u16;
            let max_weight = (max_key / graph_size) as u16;
            search_candidates_keys[idx as usize] = u32::MAX;
            s = t;
            t = idx;
            cut_of_the_phase = max_weight;
            // Raise the priority of every still-queued neighbour by the
            // weight of its edge to the node that was just extracted.
            for &(that_idx, that_weight) in modified_graph[idx as usize].iter() {
                let mut new_key = search_candidates_keys[that_idx as usize];
                if new_key != u32::MAX {
                    search_candidates_queue.remove(&new_key);
                    new_key += graph_size * that_weight as u32;
                    search_candidates_queue.insert(new_key);
                    search_candidates_keys[that_idx as usize] = new_key;
                }
            }
        }
        // store information that t was merged into s
        merge_tree[s as usize].push(t);
        // remove the last node
        // (swap t's adjacency list out so we can mutate the rest of the graph)
        let mut t_node = vec![];
        mem::swap(&mut t_node, &mut modified_graph[t as usize]);
        remaining_nodes.retain(|&idx| idx != t);
        // merge s and t into s, update other nodes connecting to s or t
        for &(edge_to, edge_weight) in t_node.iter() {
            if edge_to == s {
                // The s-t edge vanishes inside the merged node; drop s's copy.
                modified_graph[edge_to as usize].retain(|(to, _)| *to != t);
                continue;
            }
            match modified_graph[s as usize].iter().position(|&(edge_to2, _)| edge_to2 == edge_to) {
                None => {
                    // s had no edge to this neighbour yet: adopt t's edge and
                    // redirect the neighbour's back-edge from t to s.
                    modified_graph[s as usize].push((edge_to, edge_weight));
                    modified_graph[edge_to as usize].iter_mut().find(|(to, _)| *to == t).unwrap().0 = s;
                }
                Some(pos) => {
                    // Both s and t reached this neighbour: combine the weights
                    // on both sides and drop the neighbour's edge to t.
                    modified_graph[s as usize][pos].1 += edge_weight;
                    modified_graph[edge_to as usize].retain(|(to, _)| *to != t);
                    modified_graph[edge_to as usize].iter_mut().find(|(to, _)| *to == s).unwrap().1 += edge_weight;
                }
            }
        }
        // if this cut was the best so far, remember it
        if cut_of_the_phase < best_cut_weight {
            best_cut_weight = cut_of_the_phase;
            best_cut_node = t;
        }
    }
    // gather all nodes that were merged into the best cut node and return them
    // (depth-first walk of the merge tree rooted at the best cut node)
    fn add_branch(merge_tree: &Vec<Vec<u16>>, result_nodes: &mut Vec<u16>, idx: u16) {
        result_nodes.push(idx);
        for &v in &merge_tree[idx as usize] {
            add_branch(merge_tree, result_nodes, v);
        }
    }
    let mut result_nodes = vec![];
    add_branch(&merge_tree, &mut result_nodes, best_cut_node);
    (best_cut_weight, result_nodes)
}
// returns a graph in form of edge list for each node and a map of node names to indices
fn parse_graph(input_str: &str) -> (Vec<Vec<u16>>, HashMap<String, u16>) {
let mut node_index: HashMap<String, u16> = HashMap::new();
let graph: RefCell<Vec<Vec<u16>>> = RefCell::new(vec![]);
let mut get_node_index = |name: &str| -> u16 {
match node_index.entry(name.parse().unwrap()) {
std::collections::hash_map::Entry::Occupied(occupied_entry) => {
return *occupied_entry.get();
}
std::collections::hash_map::Entry::Vacant(vacant_entry) => {
let new_index = graph.borrow().len() as u16;
graph.borrow_mut().push(vec![]);
vacant_entry.insert(new_index);
return new_index;
}
}
};
for line in input_str.lines() {
let mut iter = line.split([' ', ':']).filter(|&s| !s.is_empty());
let first = get_node_index(iter.next().unwrap());
for other_name in iter {
let other = get_node_index(other_name);
graph.borrow_mut()[first as usize].push(other);
graph.borrow_mut()[other as usize].push(first);
}
}
(graph.into_inner(), node_index)
}
// Reads the puzzle input, finds the global minimum cut of the component
// graph, and prints timing plus the product of the two part sizes.
fn main() {
    // Two timers: total wall clock, and wall clock excluding the file read.
    let total_timer = Instant::now();
    let input_str = read_to_string("input.txt").unwrap();
    let compute_timer = Instant::now();
    let (graph, _names) = parse_graph(&input_str);
    let (_weight, one_side) = stoer_wagner_cut(&graph);
    // The puzzle answer is the product of the two component sizes.
    let other_side = graph.len() - one_side.len();
    let product = one_side.len() * other_side;
    let total_us = total_timer.elapsed().as_micros();
    let compute_us = compute_timer.elapsed().as_micros();
    println!("Time: {}us", total_us);
    println!("Time without file i/o: {}us", compute_us);
    println!("Cut into {}, {}, multiplied: {}", one_side.len(), other_side, product);
}