Diffstat (limited to '2023/src/bin/day_3.rs')
-rw-r--r--  2023/src/bin/day_3.rs  149
1 file changed, 149 insertions, 0 deletions
diff --git a/2023/src/bin/day_3.rs b/2023/src/bin/day_3.rs
new file mode 100644
index 0000000..06e1300
--- /dev/null
+++ b/2023/src/bin/day_3.rs
@@ -0,0 +1,149 @@
+//! Advent of Code 2023, day 3: sum the part numbers adjacent to a symbol,
+//! and the gear ratios of '*' symbols adjacent to exactly two part numbers.
+use nom::{
+    branch::alt,
+    bytes::complete::tag,
+    character::complete::{digit1, line_ending, none_of},
+    combinator::{map, map_res},
+    multi::{many1, separated_list1},
+    IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let input = fs::read_to_string("inputs/day_3.txt")?;
+    let parsed = PartInventory::parser(&input).unwrap().1;
+    dbg!(&parsed.part_number_sum());
+    dbg!(&parsed.gear_ratio_sum());
+
+    Ok(())
+}
+
+/// Every part number and symbol in the schematic, with adjacency resolved
+/// in both directions.
+#[derive(Debug)]
+struct PartInventory {
+    parts: Vec<Part>,
+    symbols: Vec<Symbol>,
+}
+
+/// A part number, the symbols it touches, and its position: row `y`,
+/// spanning columns `min_x..=max_x`.
+#[derive(Debug)]
+struct Part {
+    number: u32,
+    symbols: Vec<char>,
+    y: usize,
+    min_x: usize,
+    max_x: usize,
+}
+
+/// A symbol character, the part numbers it touches, and its grid position.
+#[derive(Debug, Clone)]
+struct Symbol {
+    symbol: char,
+    parts: Vec<u32>,
+    x: usize,
+    y: usize,
+}
+
+/// One lexed run within a row: a run of '.' of the given length, a number
+/// with its digit count, or a single symbol character.
+#[derive(Debug)]
+enum LexToken {
+    Space(usize),
+    Part(usize, u32),
+    Symbol(char),
+}
+
+impl PartInventory {
+    /// Lexes the grid, assigns each part and symbol its grid position,
+    /// then cross-links every part/symbol pair that touches.
+    fn parser(input: &str) -> IResult<&str, Self> {
+        map(LexToken::parser, |tokens| {
+            let mut parts = Vec::new();
+            let mut symbols = Vec::new();
+
+            for (y, row) in tokens.iter().enumerate() {
+                let mut x = 0;
+                for token in row {
+                    match token {
+                        LexToken::Space(_) => {}
+                        LexToken::Part(len, number) => parts.push(Part {
+                            number: *number,
+                            symbols: Vec::new(),
+                            y,
+                            min_x: x,
+                            max_x: x + len - 1,
+                        }),
+                        LexToken::Symbol(symbol) => symbols.push(Symbol {
+                            symbol: *symbol,
+                            parts: Vec::new(),
+                            y,
+                            x,
+                        }),
+                    }
+                    x += token.len();
+                }
+            }
+
+            for part in &mut parts {
+                part.symbols = symbols
+                    .iter()
+                    .filter(|symbol| part.touches(symbol))
+                    .map(|symbol| symbol.symbol)
+                    .collect();
+            }
+
+            for symbol in &mut symbols {
+                symbol.parts = parts
+                    .iter()
+                    .filter(|part| part.touches(symbol))
+                    .map(|part| part.number)
+                    .collect();
+            }
+
+            PartInventory { parts, symbols }
+        })(input)
+    }
+
+    /// Part 1: the sum of all part numbers adjacent to at least one symbol.
+    fn part_number_sum(&self) -> u32 {
+        self.parts
+            .iter()
+            .filter(|part| !part.symbols.is_empty())
+            .map(|part| part.number)
+            .sum()
+    }
+
+    /// Part 2: the sum of gear ratios; a gear is a '*' adjacent to exactly
+    /// two part numbers.
+    fn gear_ratio_sum(&self) -> u32 {
+        self.symbols
+            .iter()
+            .filter(|symbol| symbol.symbol == '*' && symbol.parts.len() == 2)
+            .map(|symbol| symbol.parts[0] * symbol.parts[1])
+            .sum()
+    }
+}
+
+impl LexToken {
+    /// Lexes each line into runs of dots, numbers, and single non-newline
+    /// symbol characters.
+    fn parser(input: &str) -> IResult<&str, Vec<Vec<Self>>> {
+        separated_list1(
+            line_ending,
+            many1(alt((
+                map(many1(tag(".")), |dots| LexToken::Space(dots.len())),
+                map_res(digit1, |num_s: &str| {
+                    num_s
+                        .parse()
+                        .map(|num_i| LexToken::Part(num_s.len(), num_i))
+                }),
+                map(none_of("\n"), LexToken::Symbol),
+            ))),
+        )(input)
+    }
+
+    /// Width of the token in grid columns.
+    fn len(&self) -> usize {
+        match self {
+            Self::Space(len) => *len,
+            Self::Part(len, _) => *len,
+            Self::Symbol(_) => 1,
+        }
+    }
+}
+
+impl Part {
+    /// True when the symbol lies within one cell of the part's column span
+    /// and row, diagonals included.
+    fn touches(&self, symbol: &Symbol) -> bool {
+        symbol.x >= self.min_x.saturating_sub(1)
+            && symbol.x <= self.max_x.saturating_add(1)
+            && symbol.y >= self.y.saturating_sub(1)
+            && symbol.y <= self.y.saturating_add(1)
+    }
+}