Diffstat (limited to '2023/src/bin')
-rw-r--r--  2023/src/bin/day_1.rs    81
-rw-r--r--  2023/src/bin/day_10.rs  321
-rw-r--r--  2023/src/bin/day_11.rs  111
-rw-r--r--  2023/src/bin/day_12.rs  323
-rw-r--r--  2023/src/bin/day_13.rs   82
-rw-r--r--  2023/src/bin/day_14.rs  145
-rw-r--r--  2023/src/bin/day_15.rs  153
-rw-r--r--  2023/src/bin/day_16.rs  209
-rw-r--r--  2023/src/bin/day_17.rs  144
-rw-r--r--  2023/src/bin/day_18.rs  214
-rw-r--r--  2023/src/bin/day_19.rs  348
-rw-r--r--  2023/src/bin/day_2.rs   109
-rw-r--r--  2023/src/bin/day_20.rs  334
-rw-r--r--  2023/src/bin/day_21.rs  395
-rw-r--r--  2023/src/bin/day_22.rs  161
-rw-r--r--  2023/src/bin/day_23.rs  218
-rw-r--r--  2023/src/bin/day_24.rs  242
-rw-r--r--  2023/src/bin/day_25.rs  154
-rw-r--r--  2023/src/bin/day_3.rs   149
-rw-r--r--  2023/src/bin/day_4.rs    92
-rw-r--r--  2023/src/bin/day_5.rs   257
-rw-r--r--  2023/src/bin/day_6.rs    70
-rw-r--r--  2023/src/bin/day_7.rs   169
-rw-r--r--  2023/src/bin/day_8.rs   213
-rw-r--r--  2023/src/bin/day_9.rs    81
25 files changed, 4775 insertions(+), 0 deletions(-)
diff --git a/2023/src/bin/day_1.rs b/2023/src/bin/day_1.rs
new file mode 100644
index 0000000..fe3ef1e
--- /dev/null
+++ b/2023/src/bin/day_1.rs
@@ -0,0 +1,81 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{alpha1, anychar, line_ending, none_of, one_of},
+ combinator::{map, map_opt, map_res, opt, peek, value},
+ multi::{many1, separated_list1},
+ sequence::pair,
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_1.txt")?;
+ let parsed = CalibrationInput::parser(&input).unwrap().1;
+ dbg!(parsed.calibration_sum());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct CalibrationInput(Vec<CalibrationValue>);
+
+#[derive(Debug)]
+struct CalibrationValue(u32);
+
+#[derive(Debug)]
+struct CalibrationDigit(u8);
+
+impl CalibrationInput {
+ fn calibration_sum(&self) -> u32 {
+ self.0.iter().map(|d| d.0).sum()
+ }
+
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, CalibrationValue::parser),
+ CalibrationInput,
+ )(input)
+ }
+}
+
+impl CalibrationValue {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ many1(pair(
+ opt(peek(CalibrationDigit::parser)), // peek is to not consume the 'one' in 'twone'
+ none_of("\n"), // this is just to advance the input
+ )),
+ |digits| {
+ let digits: Vec<u32> = digits
+ .into_iter()
+ .filter_map(|(d, _)| d.map(|d| d.0 as u32))
+ .collect();
+ CalibrationValue(digits[0] * 10 + digits[digits.len() - 1])
+ },
+ )(input)
+ }
+}
+
+impl CalibrationDigit {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ alt((
+ map(one_of("0123456789"), |c| {
+ c.to_string().parse::<u8>().unwrap()
+ }),
+ value(0, tag("zero")),
+ value(1, tag("one")),
+ value(2, tag("two")),
+ value(3, tag("three")),
+ value(4, tag("four")),
+ value(5, tag("five")),
+ value(6, tag("six")),
+ value(7, tag("seven")),
+ value(8, tag("eight")),
+ value(9, tag("nine")),
+ )),
+ CalibrationDigit,
+ )(input)
+ }
+}
diff --git a/2023/src/bin/day_10.rs b/2023/src/bin/day_10.rs
new file mode 100644
index 0000000..84d6327
--- /dev/null
+++ b/2023/src/bin/day_10.rs
@@ -0,0 +1,321 @@
+use nom::{
+ branch::alt,
+ character::complete::{char as nom_char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_10.txt")?;
+ let parsed = Maze::parser(&input).unwrap().1;
+ dbg!(&parsed.find_furthest_point_in_loop());
+ dbg!(&parsed.count_blocks_inside_the_pipe_loop());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Maze(Vec<Vec<Pipe>>);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum Pipe {
+ Start,
+ Nothing,
+ DownLeft,
+ DownRight,
+ DownUp,
+ LeftRight,
+ UpLeft,
+ UpRight,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum Direction {
+ Up,
+ Down,
+ Left,
+ Right,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+struct Point {
+ x: usize,
+ y: usize,
+}
+
+#[derive(Debug)]
+struct FloodFillMaze(Vec<Vec<Option<FloodFill>>>);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum FloodFill {
+ Outside,
+ Inside,
+ Pipe,
+}
+
+impl Maze {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, many1(Pipe::parser)), Maze)(input)
+ }
+
+ fn find_furthest_point_in_loop(&self) -> usize {
+ self.measure_loop_size() / 2
+ }
+
+ fn measure_loop_size(&self) -> usize {
+ let mut position = self.find_start();
+ let mut facing = self.find_start_facing(position);
+ position = position.go(facing);
+ let mut distance = 1;
+
+ while self.at(position) != Pipe::Start {
+ let current_pipe = self.at(position);
+ facing = current_pipe.exit_facing(facing).unwrap();
+ position = position.go(facing);
+ distance += 1;
+ }
+
+ distance
+ }
+
+ fn count_blocks_inside_the_pipe_loop(&self) -> usize {
+ let mut flood_fill_maze = FloodFillMaze::init_for_maze(&self);
+
+ let start_position = self.find_start();
+
+ {
+ // fill the pipe loop
+ let mut position = start_position;
+ flood_fill_maze.fill(position, FloodFill::Pipe);
+
+ let mut facing = self.find_start_facing(position);
+ position = position.go(facing);
+
+ while self.at(position) != Pipe::Start {
+ flood_fill_maze.fill(position, FloodFill::Pipe);
+ facing = self.at(position).exit_facing(facing).unwrap();
+ position = position.go(facing);
+ }
+ }
+
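+ // Ray casting: for every unfilled cell, trace a vertical ray and count how
+ // many loop segments it crosses; odd parity means the cell is inside. The
+ // trace direction is chosen so the ray never passes through the Start tile,
+ // whose pipe shape is unknown.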
+ for y in 0..self.0.len() {
+ for x in 0..self.0[y].len() {
+ if flood_fill_maze.0[y][x].is_none() {
+ let trace_range = if y <= start_position.y {
+ 0..y
+ } else {
+ y + 1..self.0.len()
+ };
+ let mut outside = true;
+ for trace_y in trace_range {
+ if flood_fill_maze.0[trace_y][x] == Some(FloodFill::Pipe)
+ && self
+ .at(Point { x, y: trace_y })
+ .crossed_by_vertical_hanging_right()
+ {
+ outside = !outside;
+ }
+ }
+ flood_fill_maze.fill(
+ Point { x, y },
+ if outside {
+ FloodFill::Outside
+ } else {
+ FloodFill::Inside
+ },
+ );
+ }
+ }
+ }
+
+ flood_fill_maze
+ .0
+ .iter()
+ .flat_map(|row| row.iter())
+ .filter(|x| **x == Some(FloodFill::Inside))
+ .count()
+ }
+
+ fn at(&self, p: Point) -> Pipe {
+ self.0[p.y][p.x]
+ }
+
+ fn find_start(&self) -> Point {
+ for (y, row) in self.0.iter().enumerate() {
+ for (x, pipe) in row.iter().enumerate() {
+ if *pipe == Pipe::Start {
+ return Point { x, y };
+ }
+ }
+ }
+ panic!("No Start!");
+ }
+
+ fn find_start_facing(&self, start: Point) -> Direction {
+ if start.y > 0 && self.at(start.up()).connections().contains(&Direction::Down) {
+ Direction::Up
+ } else if start.y < self.0.len() - 1
+ && self.at(start.down()).connections().contains(&Direction::Up)
+ {
+ Direction::Down
+ } else if start.x > 0
+ && self
+ .at(start.left())
+ .connections()
+ .contains(&Direction::Right)
+ {
+ Direction::Left
+ } else if start.x < self.0[start.y].len() - 1
+ && self
+ .at(start.right())
+ .connections()
+ .contains(&Direction::Left)
+ {
+ Direction::Right
+ } else {
+ panic!()
+ }
+ }
+}
+
+impl Pipe {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ use Pipe::*;
+
+ alt((
+ value(Start, nom_char('S')),
+ value(Nothing, nom_char('.')),
+ value(DownLeft, nom_char('7')),
+ value(DownRight, nom_char('F')),
+ value(DownUp, nom_char('|')),
+ value(LeftRight, nom_char('-')),
+ value(UpLeft, nom_char('J')),
+ value(UpRight, nom_char('L')),
+ ))(input)
+ }
+
+ fn connections(&self) -> Vec<Direction> {
+ use Direction::*;
+
+ match self {
+ Pipe::Start => vec![],
+ Pipe::Nothing => vec![],
+ Pipe::DownLeft => vec![Down, Left],
+ Pipe::DownRight => vec![Down, Right],
+ Pipe::DownUp => vec![Down, Up],
+ Pipe::LeftRight => vec![Left, Right],
+ Pipe::UpLeft => vec![Up, Left],
+ Pipe::UpRight => vec![Up, Right],
+ }
+ }
+
+ fn exit_facing(&self, facing: Direction) -> Option<Direction> {
+ use Direction::*;
+
+ match self {
+ Pipe::Start => None,
+ Pipe::Nothing => None,
+ Pipe::DownLeft => match facing {
+ Up => Some(Left),
+ Right => Some(Down),
+ _ => None,
+ },
+ Pipe::DownRight => match facing {
+ Up => Some(Right),
+ Left => Some(Down),
+ _ => None,
+ },
+ Pipe::DownUp => match facing {
+ Up => Some(Up),
+ Down => Some(Down),
+ _ => None,
+ },
+ Pipe::LeftRight => match facing {
+ Left => Some(Left),
+ Right => Some(Right),
+ _ => None,
+ },
+ Pipe::UpLeft => match facing {
+ Down => Some(Left),
+ Right => Some(Up),
+ _ => None,
+ },
+ Pipe::UpRight => match facing {
+ Down => Some(Right),
+ Left => Some(Up),
+ _ => None,
+ },
+ }
+ }
+
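+ // A vertical ray counts a crossing only at pipes with a rightward arm
+ // (F, -, L). Counting just one side of each horizontal run keeps corners
+ // consistent: an F…L pair (the loop grazes the column) adds 2, while an
+ // F…J pair (the loop truly crosses) adds 1.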
+ fn crossed_by_vertical_hanging_right(&self) -> bool {
+ match self {
+ Pipe::Start => panic!("Undefined crossing over the start"),
+ Pipe::Nothing => false,
+ Pipe::DownLeft => false,
+ Pipe::DownRight => true,
+ Pipe::DownUp => false,
+ Pipe::LeftRight => true,
+ Pipe::UpLeft => false,
+ Pipe::UpRight => true,
+ }
+ }
+}
+
+impl Point {
+ fn up(&self) -> Point {
+ Point {
+ x: self.x,
+ y: self.y - 1,
+ }
+ }
+
+ fn down(&self) -> Point {
+ Point {
+ x: self.x,
+ y: self.y + 1,
+ }
+ }
+
+ fn left(&self) -> Point {
+ Point {
+ x: self.x - 1,
+ y: self.y,
+ }
+ }
+
+ fn right(&self) -> Point {
+ Point {
+ x: self.x + 1,
+ y: self.y,
+ }
+ }
+
+ fn go(&self, facing: Direction) -> Point {
+ match facing {
+ Direction::Up => self.up(),
+ Direction::Down => self.down(),
+ Direction::Left => self.left(),
+ Direction::Right => self.right(),
+ }
+ }
+}
+
+impl FloodFillMaze {
+ fn init_for_maze(maze: &Maze) -> FloodFillMaze {
+ FloodFillMaze(
+ maze.0
+ .iter()
+ .map(|row| row.iter().map(|_| None).collect())
+ .collect(),
+ )
+ }
+
+ fn fill(&mut self, point: Point, fill: FloodFill) {
+ if self.0[point.y][point.x].is_none() {
+ self.0[point.y][point.x] = Some(fill);
+ }
+ }
+}
diff --git a/2023/src/bin/day_11.rs b/2023/src/bin/day_11.rs
new file mode 100644
index 0000000..f78417e
--- /dev/null
+++ b/2023/src/bin/day_11.rs
@@ -0,0 +1,111 @@
+use nom::{
+ branch::alt,
+ character::complete::{char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_11.txt")?;
+ let galaxy_map = GalaxyMap::parser(&input).unwrap().1;
+
+ let part_1_expanded_galaxy_map = galaxy_map.expand(2);
+ dbg!(&part_1_expanded_galaxy_map.distance_sum_between_galaxies());
+
+ let part_2_expanded_galaxy_map = galaxy_map.expand(1_000_000);
+ dbg!(&part_2_expanded_galaxy_map.distance_sum_between_galaxies());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct GalaxyMap(Vec<Point>);
+
+#[derive(Debug, Clone)]
+struct Point {
+ x: usize,
+ y: usize,
+}
+
+#[derive(Debug, Clone)]
+enum Token {
+ Space,
+ Galaxy,
+}
+
+impl GalaxyMap {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(
+ line_ending,
+ many1(alt((
+ value(Token::Space, char('.')),
+ value(Token::Galaxy, char('#')),
+ ))),
+ ),
+ |rows| {
+ GalaxyMap(
+ rows.into_iter()
+ .enumerate()
+ .flat_map(move |(y, row)| {
+ row.into_iter()
+ .enumerate()
+ .filter_map(move |(x, token)| match token {
+ Token::Space => None,
+ Token::Galaxy => Some(Point { x, y }),
+ })
+ })
+ .collect(),
+ )
+ },
+ )(input)
+ }
+
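+ // Rather than materialising the expanded grid, shift each galaxy by
+ // (expansion_factor - 1) for every empty row/column that precedes it.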
+ fn expand(&self, expansion_factor: usize) -> GalaxyMap {
+ let max_y = self.0.iter().map(|p| p.y).max().unwrap();
+ let empty_rows: Vec<usize> = (0..max_y)
+ .filter(|y| !self.0.iter().any(|p| p.y == *y))
+ .collect();
+
+ let max_x = self.0.iter().map(|p| p.x).max().unwrap();
+ let empty_cols: Vec<usize> = (0..max_x)
+ .filter(|x| !self.0.iter().any(|p| p.x == *x))
+ .collect();
+
+ let extra_spaces = expansion_factor - 1;
+
+ GalaxyMap(
+ self.0
+ .iter()
+ .map(|p| Point {
+ x: p.x
+ + empty_cols.iter().filter(|empty_x| **empty_x < p.x).count()
+ * extra_spaces,
+ y: p.y
+ + empty_rows.iter().filter(|empty_y| **empty_y < p.y).count()
+ * extra_spaces,
+ })
+ .collect(),
+ )
+ }
+
+ fn distance_sum_between_galaxies(&self) -> usize {
+ let mut sum = 0;
+
+ for i in 0..self.0.len() {
+ for j in i + 1..self.0.len() {
+ sum += self.0[i].distance_to(&self.0[j]);
+ }
+ }
+
+ sum
+ }
+}
+
+impl Point {
+ fn distance_to(&self, other: &Point) -> usize {
+ other.x.abs_diff(self.x) + other.y.abs_diff(self.y)
+ }
+}
diff --git a/2023/src/bin/day_12.rs b/2023/src/bin/day_12.rs
new file mode 100644
index 0000000..e60b450
--- /dev/null
+++ b/2023/src/bin/day_12.rs
@@ -0,0 +1,323 @@
+use nom::{
+ branch::alt,
+ character::complete::{char, line_ending, space1, u32},
+ combinator::{map, value},
+ multi::{many1, many1_count, separated_list1},
+ sequence::separated_pair,
+ IResult,
+};
+use std::{collections::BTreeMap, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_12.txt")?;
+ let small = SpringField::parser(&input).unwrap().1;
+ dbg!(&small.possibilities_sum());
+
+ let large = small.expand();
+ dbg!(&large.possibilities_sum());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct SpringField(Vec<SpringRow>);
+
+#[derive(Debug, Clone)]
+struct SpringRow {
+ springs: Vec<Spring>,
+ check: Vec<u32>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum Spring {
+ Good,
+ Bad(usize),
+ Unknown,
+}
+
+impl SpringField {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, SpringRow::parser), SpringField)(input)
+ }
+
+ fn possibilities_sum(&self) -> usize {
+ self.0.iter().map(|r| r.possibilities_count()).sum()
+ }
+
+ fn expand(&self) -> SpringField {
+ SpringField(self.0.iter().map(|r| r.expand()).collect())
+ }
+}
+
+impl SpringRow {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_pair(
+ many1(Spring::parser),
+ space1,
+ separated_list1(char(','), u32),
+ ),
+ |(springs, check)| SpringRow { springs, check },
+ )(input)
+ }
+
+ fn expand(&self) -> SpringRow {
+ let mut expanded = SpringRow {
+ springs: Vec::new(),
+ check: Vec::new(),
+ };
+
+ for _ in 0..5 {
+ expanded.springs.append(&mut self.springs.clone());
+ expanded.springs.push(Spring::Unknown);
+ expanded.check.append(&mut self.check.clone());
+ }
+
+ // the extra Unknown separator should only sit between copies, not after the last one.
+ expanded.springs.pop();
+
+ expanded
+ }
+
+ fn possibilities_count(&self) -> usize {
+ let optimized = self.optimize_tail();
+ optimized.possibilities_count_inner(0, 0, &mut BTreeMap::new())
+ }
+
+ fn optimize_tail(&self) -> SpringRow {
+ let mut optimized = self.clone();
+ while optimized.springs.len() > 0 && optimized.check.len() > 0 {
+ match optimized.springs[optimized.springs.len() - 1] {
+ Spring::Good => {
+ optimized.springs.pop();
+ }
+ Spring::Bad(s) if s > optimized.check[optimized.check.len() - 1] as usize => {
+ panic!("Unsolvable row");
+ }
+ Spring::Bad(s) if s <= optimized.check[optimized.check.len() - 1] as usize => {
+ optimized.trim_bad_suffix();
+ }
+ Spring::Bad(_) => unreachable!(),
+ Spring::Unknown => {
+ break;
+ }
+ }
+ }
+
+ optimized
+ }
+
+ fn trim_bad_suffix(&mut self) {
+ let last_check = self.check.pop().expect("No trailing check to pop") as usize;
+ let mut check_i = 0;
+ while check_i < last_check {
+ match self
+ .springs
+ .pop()
+ .expect("Ran out of springs while optimizing suffix!")
+ {
+ Spring::Unknown => {
+ check_i += 1;
+ }
+ Spring::Bad(inc) => {
+ check_i += inc;
+ }
+ Spring::Good => {
+ panic!("Found a good spring in the middle of what must be a bad spring range");
+ }
+ }
+ }
+ let is_boundary = self.springs.len() == 0;
+ let has_extra_good_or_unknown_to_trim =
+ !is_boundary && !matches!(self.springs.last(), Some(Spring::Bad(_)));
+
+ let valid_suffix =
+ check_i == last_check && (is_boundary || has_extra_good_or_unknown_to_trim);
+ if !valid_suffix {
+ panic!("The suffix had another invalid bad section immediately!");
+ }
+
+ if has_extra_good_or_unknown_to_trim {
+ self.springs.pop();
+ }
+ }
+
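+ // Memoised recursion keyed on (springs_offset, check_offset): either the
+ // next damaged group starts right here (trim_bad_prefix) or the current
+ // cell is treated as operational and the offset advances by one.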
+ fn possibilities_count_inner(
+ &self,
+ springs_offset: usize,
+ check_offset: usize,
+ cache: &mut BTreeMap<(usize, usize), usize>,
+ ) -> usize {
+ if let Some(cached) = cache.get(&(springs_offset, check_offset)) {
+ return *cached;
+ };
+
+ let (springs_offset, check_offset) = match self.optimize_head(springs_offset, check_offset)
+ {
+ Some(optimized) => optimized,
+ None => {
+ return 0;
+ }
+ };
+
+ let springs = &self.springs[springs_offset..];
+ let check = &self.check[check_offset..];
+
+ if check.len() == 1 && springs.iter().all(|s| !matches!(s, Spring::Bad(_))) {
+ let mut contiguous_unknowns = Vec::new();
+ let mut next_contiguous_unknown = 0;
+ let mut is_in_unknown = false;
+ for spring in springs.iter() {
+ match spring {
+ Spring::Unknown => {
+ next_contiguous_unknown += 1;
+ is_in_unknown = true;
+ }
+ Spring::Good => {
+ if is_in_unknown {
+ contiguous_unknowns.push(next_contiguous_unknown);
+ next_contiguous_unknown = 0;
+ is_in_unknown = false;
+ }
+ }
+ Spring::Bad(_) => unreachable!(),
+ }
+ }
+ if is_in_unknown {
+ contiguous_unknowns.push(next_contiguous_unknown);
+ }
+
+ return contiguous_unknowns
+ .iter()
+ .map(|region| {
+ if *region >= check[0] as usize {
+ region - check[0] as usize + 1
+ } else {
+ 0
+ }
+ })
+ .sum();
+ }
+
+ if check.len() == 0 {
+ if springs.iter().any(|s| matches!(s, Spring::Bad(_))) {
+ return 0;
+ } else {
+ return 1;
+ }
+ }
+
+ let valid_prefix_possibilities_count =
+ if let Some((without_prefix_springs_offset, without_prefix_check_offset)) =
+ self.trim_bad_prefix(springs_offset, check_offset)
+ {
+ self.possibilities_count_inner(
+ without_prefix_springs_offset,
+ without_prefix_check_offset,
+ cache,
+ )
+ } else {
+ 0
+ };
+
+ let non_prefix_possibilities_count =
+ self.possibilities_count_inner(springs_offset + 1, check_offset, cache);
+
+ let total_possibilities = valid_prefix_possibilities_count + non_prefix_possibilities_count;
+
+ cache.insert((springs_offset, check_offset), total_possibilities);
+
+ total_possibilities
+ }
+
+ fn optimize_head(
+ &self,
+ mut springs_offset: usize,
+ mut check_offset: usize,
+ ) -> Option<(usize, usize)> {
+ while springs_offset < self.springs.len() && check_offset < self.check.len() {
+ match self.springs[springs_offset] {
+ Spring::Good => {
+ springs_offset += 1;
+ }
+ Spring::Bad(s) if s > self.check[check_offset] as usize => {
+ return None;
+ }
+ Spring::Bad(s) if s <= self.check[check_offset] as usize => {
+ match self.trim_bad_prefix(springs_offset, check_offset) {
+ Some((new_springs, new_check)) => {
+ springs_offset = new_springs;
+ check_offset = new_check;
+ }
+ None => return None,
+ };
+ }
+ Spring::Bad(_) => unreachable!(),
+ Spring::Unknown => {
+ break;
+ }
+ }
+ }
+
+ if springs_offset == self.springs.len() && check_offset != self.check.len() {
+ None
+ } else {
+ Some((springs_offset, check_offset))
+ }
+ }
+
+ fn trim_bad_prefix(
+ &self,
+ springs_offset: usize,
+ check_offset: usize,
+ ) -> Option<(usize, usize)> {
+ let first_check = self.check[check_offset] as usize;
+ let mut check_i = 0;
+ let mut springs_i = springs_offset;
+ while check_i < first_check {
+ if springs_i >= self.springs.len() {
+ return None;
+ }
+
+ match self.springs[springs_i] {
+ Spring::Unknown => {
+ check_i += 1;
+ }
+ Spring::Bad(inc) => {
+ check_i += inc;
+ }
+ Spring::Good => {
+ return None;
+ }
+ }
+ springs_i += 1;
+ }
+
+ let is_boundary = springs_i == self.springs.len();
+ let has_extra_good_or_unknown_to_trim =
+ !is_boundary && !matches!(self.springs[springs_i], Spring::Bad(_));
+ let valid_prefix =
+ check_i == first_check && (is_boundary || has_extra_good_or_unknown_to_trim);
+
+ if valid_prefix {
+ let new_springs_i = if has_extra_good_or_unknown_to_trim {
+ springs_i + 1
+ } else {
+ springs_i
+ };
+ Some((new_springs_i, check_offset + 1))
+ } else {
+ None
+ }
+ }
+}
+
+impl Spring {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(Spring::Good, many1_count(char('.'))),
+ map(many1_count(char('#')), Spring::Bad),
+ value(Spring::Unknown, char('?')),
+ ))(input)
+ }
+}
diff --git a/2023/src/bin/day_13.rs b/2023/src/bin/day_13.rs
new file mode 100644
index 0000000..ad97602
--- /dev/null
+++ b/2023/src/bin/day_13.rs
@@ -0,0 +1,82 @@
+use nom::{
+ character::complete::{line_ending, one_of},
+ combinator::map,
+ multi::{many1, separated_list1},
+ sequence::pair,
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_13.txt")?;
+ let parsed = ManyMaps::parser(&input).unwrap().1;
+ dbg!(&parsed.reflection_score_sum(0));
+ dbg!(&parsed.reflection_score_sum(1));
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct ManyMaps(Vec<Map>);
+
+#[derive(Debug)]
+struct Map {
+ rows: Vec<String>,
+ cols: Vec<String>,
+}
+
+impl ManyMaps {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(pair(line_ending, line_ending), Map::parser),
+ ManyMaps,
+ )(input)
+ }
+
+ fn reflection_score_sum(&self, expected_smudge_count: usize) -> usize {
+ self.0
+ .iter()
+ .map(|m| m.reflection_score(expected_smudge_count))
+ .sum()
+ }
+}
+
+impl Map {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, many1(one_of(".#"))), |rows| {
+ Map {
+ rows: rows
+ .iter()
+ .map(|char_array| char_array.iter().collect::<String>())
+ .collect(),
+ cols: (0..rows[0].len())
+ .map(|i| rows.iter().map(|row| row[i]).collect::<String>())
+ .collect(),
+ }
+ })(input)
+ }
+
+ fn reflection_score(&self, expected_smudge_count: usize) -> usize {
+ reflection_i(&self.cols, expected_smudge_count)
+ .or_else(|| reflection_i(&self.rows, expected_smudge_count).map(|y| y * 100))
+ .expect("No reflection!")
+ }
+}
+
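+// For each candidate fold line, count the characters that differ between
+// mirrored rows; the fold is valid when the mismatch count equals the
+// expected smudge count (0 for part 1, 1 for part 2).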
+fn reflection_i(rows: &[String], expected_smudge_count: usize) -> Option<usize> {
+ for i in 1..rows.len() {
+ let mut smudge_count = 0;
+ for d in 1..=(i.min(rows.len() - i)) {
+ smudge_count += rows[i - d]
+ .chars()
+ .zip(rows[i + d - 1].chars())
+ .filter(|(a, b)| a != b)
+ .count();
+ }
+ let is_reflection = smudge_count == expected_smudge_count;
+ if is_reflection {
+ return Some(i);
+ }
+ }
+ None
+}
diff --git a/2023/src/bin/day_14.rs b/2023/src/bin/day_14.rs
new file mode 100644
index 0000000..e975f41
--- /dev/null
+++ b/2023/src/bin/day_14.rs
@@ -0,0 +1,145 @@
+use nom::{
+ branch::alt,
+ character::complete::{char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::{collections::BTreeMap, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_14.txt")?;
+ let rock_field = RockField::parser(&input).unwrap().1;
+ {
+ let mut north_rock_field = rock_field.clone();
+ north_rock_field.tilt_north();
+ dbg!(&north_rock_field.north_load());
+ }
+
+ {
+ let mut spin_rock_field = rock_field.clone();
+ let mut last_east = BTreeMap::new();
+ let mut i = 0;
+ let target = 1000000000;
+ while i < target {
+ spin_rock_field.tilt_north();
+ spin_rock_field.tilt_west();
+ spin_rock_field.tilt_south();
+ spin_rock_field.tilt_east();
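+ // Cycle detection: once a field state repeats after a full spin, skip
+ // ahead by as many whole cycle lengths as still fit before the target.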
+ if let Some(last_i) = last_east.get(&spin_rock_field) {
+ let interval = i - last_i;
+ // relying on integer division to round down here, want to add
+ // interval as many times as possible without going over target.
+ i += ((target - i) / interval) * interval;
+ } else {
+ last_east.insert(spin_rock_field.clone(), i);
+ }
+ i += 1;
+ }
+ dbg!(&spin_rock_field.north_load());
+ }
+
+ Ok(())
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+struct RockField(Vec<Vec<Rock>>);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum Rock {
+ Rounded,
+ Cubed,
+ None,
+}
+
+impl RockField {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, many1(Rock::parser)), RockField)(input)
+ }
+
+ fn tilt_north(&mut self) {
+ for y in 0..self.0.len() {
+ for x in 0..self.0[y].len() {
+ if self.0[y][x] == Rock::Rounded {
+ let new_y = (0..y)
+ .rev()
+ .take_while(|new_y| self.0[*new_y][x] == Rock::None)
+ .last();
+ if let Some(new_y) = new_y {
+ self.0[new_y][x] = Rock::Rounded;
+ self.0[y][x] = Rock::None;
+ }
+ }
+ }
+ }
+ }
+
+ fn tilt_west(&mut self) {
+ for x in 0..self.0[0].len() {
+ for y in 0..self.0.len() {
+ if self.0[y][x] == Rock::Rounded {
+ let new_x = (0..x)
+ .rev()
+ .take_while(|new_x| self.0[y][*new_x] == Rock::None)
+ .last();
+ if let Some(new_x) = new_x {
+ self.0[y][new_x] = Rock::Rounded;
+ self.0[y][x] = Rock::None;
+ }
+ }
+ }
+ }
+ }
+
+ fn tilt_south(&mut self) {
+ for y in (0..self.0.len()).rev() {
+ for x in 0..self.0[y].len() {
+ if self.0[y][x] == Rock::Rounded {
+ let new_y = (y + 1..self.0.len())
+ .take_while(|new_y| self.0[*new_y][x] == Rock::None)
+ .last();
+ if let Some(new_y) = new_y {
+ self.0[new_y][x] = Rock::Rounded;
+ self.0[y][x] = Rock::None;
+ }
+ }
+ }
+ }
+ }
+
+ fn tilt_east(&mut self) {
+ for x in (0..self.0[0].len()).rev() {
+ for y in 0..self.0.len() {
+ if self.0[y][x] == Rock::Rounded {
+ let new_x = (x + 1..self.0[0].len())
+ .take_while(|new_x| self.0[y][*new_x] == Rock::None)
+ .last();
+ if let Some(new_x) = new_x {
+ self.0[y][new_x] = Rock::Rounded;
+ self.0[y][x] = Rock::None;
+ }
+ }
+ }
+ }
+ }
+
+ fn north_load(&self) -> usize {
+ self.0
+ .iter()
+ .enumerate()
+ .map(|(y, row)| {
+ row.iter().filter(|r| **r == Rock::Rounded).count() * (self.0.len() - y)
+ })
+ .sum()
+ }
+}
+
+impl Rock {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(Rock::Rounded, char('O')),
+ value(Rock::Cubed, char('#')),
+ value(Rock::None, char('.')),
+ ))(input)
+ }
+}
diff --git a/2023/src/bin/day_15.rs b/2023/src/bin/day_15.rs
new file mode 100644
index 0000000..429e7f4
--- /dev/null
+++ b/2023/src/bin/day_15.rs
@@ -0,0 +1,153 @@
+use std::fs;
+
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{alpha1, u32},
+ combinator::{consumed, map, value},
+ multi::separated_list1,
+ sequence::{pair, preceded},
+ IResult,
+};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_15.txt")?;
+ let parsed = InitializationInstructions::parser(&input).unwrap().1;
+ dbg!(&parsed.hash_sum());
+ dbg!(&parsed.final_focusing_power());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct InitializationInstructions(Vec<Instruction>);
+
+#[derive(Debug)]
+struct Instruction {
+ instruction: String,
+ label: String,
+ action: Action,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum Action {
+ Remove,
+ Insert(u32),
+}
+
+#[derive(Default, Debug, Clone)]
+struct LensBox {
+ lenses: Vec<Lens>,
+}
+
+#[derive(Debug, Clone)]
+struct Lens {
+ label: String,
+ power: u32,
+}
+
+impl InitializationInstructions {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(tag(","), Instruction::parser),
+ InitializationInstructions,
+ )(input)
+ }
+
+ fn hash_sum(&self) -> usize {
+ self.0.iter().map(|i| hash(&i.instruction)).sum()
+ }
+
+ fn final_focusing_power(&self) -> u32 {
+ let mut boxes = vec![LensBox::default(); 256];
+ for instruction in &self.0 {
+ let box_index = hash(&instruction.label);
+ let lens_box = &mut boxes[box_index];
+ match instruction.action {
+ Action::Remove => {
+ lens_box
+ .lenses
+ .retain(|lens| lens.label != instruction.label);
+ }
+ Action::Insert(power) => {
+ let existing_position = lens_box
+ .lenses
+ .iter()
+ .position(|lens| lens.label == instruction.label);
+ match existing_position {
+ Some(position) => {
+ lens_box.lenses[position].power = power;
+ }
+ None => {
+ lens_box.lenses.push(Lens {
+ label: instruction.label.clone(),
+ power,
+ });
+ }
+ }
+ }
+ }
+ }
+
+ boxes
+ .into_iter()
+ .enumerate()
+ .flat_map(|(box_index, lens_box)| {
+ lens_box
+ .lenses
+ .into_iter()
+ .enumerate()
+ .map(move |(lens_index, lens)| {
+ (box_index as u32 + 1) * (lens_index as u32 + 1) * lens.power
+ })
+ })
+ .sum()
+ }
+}
+
+impl Instruction {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ consumed(pair(alpha1, Action::parser)),
+ |(instruction, (label, action))| Instruction {
+ instruction: instruction.to_owned(),
+ label: label.to_owned(),
+ action,
+ },
+ )(input)
+ }
+}
+
+impl Action {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(Action::Remove, tag("-")),
+ map(preceded(tag("="), u32), Action::Insert),
+ ))(input)
+ }
+}
+
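+// The puzzle's HASH algorithm: add each byte's ASCII value, multiply by 17,
+// and reduce modulo 256.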
+fn hash(input: &str) -> usize {
+ let mut result: usize = 0;
+ for c in input.bytes() {
+ result += Into::<usize>::into(c);
+ result *= 17;
+ result %= 256;
+ }
+ result
+}
+
+#[test]
+fn examples() {
+ assert_eq!(hash("rn=1"), 30);
+ assert_eq!(hash("cm-"), 253);
+ assert_eq!(hash("qp=3"), 97);
+ assert_eq!(hash("cm=2"), 47);
+ assert_eq!(hash("qp-"), 14);
+ assert_eq!(hash("pc=4"), 180);
+ assert_eq!(hash("ot=9"), 9);
+ assert_eq!(hash("ab=5"), 197);
+ assert_eq!(hash("pc-"), 48);
+ assert_eq!(hash("pc=6"), 214);
+ assert_eq!(hash("ot=7"), 231);
+}
diff --git a/2023/src/bin/day_16.rs b/2023/src/bin/day_16.rs
new file mode 100644
index 0000000..2c02d92
--- /dev/null
+++ b/2023/src/bin/day_16.rs
@@ -0,0 +1,209 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::line_ending,
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::{collections::BTreeSet, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_16.txt")?;
+ let device = LightDevice::parser(&input).unwrap().1;
+ {
+ let mut part_1_device = device.clone();
+ part_1_device.energize(Point { x: 0, y: 0 }, Direction::East);
+ dbg!(&part_1_device.count_energized_blocks());
+ }
+
+ dbg!(device.find_max_energization());
+
+ Ok(())
+}
+
+#[derive(Debug, Clone)]
+struct LightDevice {
+ mirrors: Vec<Vec<LightBlock>>,
+ light: Vec<Vec<BTreeSet<Direction>>>,
+ bounds: Point,
+}
+
+#[derive(Debug, Clone, Copy)]
+enum LightBlock {
+ MirrorForwardLeaning, // /
+ MirrorBackwardsLeaning, // \
+ Empty, // .
+ HorizontalSplitter, // -
+ VerticalSplitter, // |
+}
+
+#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
+enum Direction {
+ North,
+ South,
+ East,
+ West,
+}
+
+#[derive(Debug, Clone)]
+struct Point {
+ x: usize,
+ y: usize,
+}
+
+impl LightDevice {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, many1(LightBlock::parser)),
+ |mirrors| LightDevice {
+ bounds: Point {
+ x: mirrors[0].len(),
+ y: mirrors.len(),
+ },
+ light: mirrors
+ .iter()
+ .map(|mirror_row| vec![BTreeSet::new(); mirror_row.len()])
+ .collect(),
+ mirrors,
+ },
+ )(input)
+ }
+
+ fn energize(&mut self, start_point: Point, start_direction: Direction) {
+ let mut frontier = vec![(start_point.clone(), start_direction.clone())];
+ self.light[start_point.y][start_point.x].insert(start_direction);
+
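+ // Trace beams as (position, direction) states; the light grid records every
+ // direction already seen per cell so a beam is never re-traced, which keeps
+ // mirror cycles from looping forever.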
+ while let Some((front_light_p, front_light_dir)) = frontier.pop() {
+ let mirror = self.mirrors[front_light_p.y][front_light_p.x];
+ let new_dirs: Vec<Direction> = match (mirror, front_light_dir) {
+ (LightBlock::MirrorForwardLeaning, Direction::North) => vec![Direction::East],
+ (LightBlock::MirrorForwardLeaning, Direction::South) => vec![Direction::West],
+ (LightBlock::MirrorForwardLeaning, Direction::East) => vec![Direction::North],
+ (LightBlock::MirrorForwardLeaning, Direction::West) => vec![Direction::South],
+ (LightBlock::MirrorBackwardsLeaning, Direction::North) => vec![Direction::West],
+ (LightBlock::MirrorBackwardsLeaning, Direction::South) => vec![Direction::East],
+ (LightBlock::MirrorBackwardsLeaning, Direction::East) => vec![Direction::South],
+ (LightBlock::MirrorBackwardsLeaning, Direction::West) => vec![Direction::North],
+ (LightBlock::HorizontalSplitter, Direction::North)
+ | (LightBlock::HorizontalSplitter, Direction::South) => {
+ vec![Direction::East, Direction::West]
+ }
+ (LightBlock::VerticalSplitter, Direction::East)
+ | (LightBlock::VerticalSplitter, Direction::West) => {
+ vec![Direction::North, Direction::South]
+ }
+ (LightBlock::Empty, dir)
+ | (LightBlock::HorizontalSplitter, dir)
+ | (LightBlock::VerticalSplitter, dir) => vec![dir],
+ };
+
+ let new_lights: Vec<(Point, Direction)> = new_dirs
+ .into_iter()
+ .filter_map(|dir| front_light_p.go(dir, &self.bounds).map(|p| (p, dir)))
+ .collect();
+
+ for (new_light_p, new_light_dir) in new_lights {
+ if !self.light[new_light_p.y][new_light_p.x].contains(&new_light_dir) {
+ self.light[new_light_p.y][new_light_p.x].insert(new_light_dir);
+ frontier.push((new_light_p, new_light_dir));
+ }
+ }
+ }
+ }
+
+ fn count_energized_blocks(&self) -> usize {
+ self.light
+ .iter()
+ .flat_map(|row| row.iter())
+ .filter(|set| set.len() > 0)
+ .count()
+ }
+
+ fn find_max_energization(&self) -> usize {
+ (0..self.bounds.x)
+ .flat_map(|x| {
+ vec![
+ (Point { x, y: 0 }, Direction::South),
+ (
+ Point {
+ x,
+ y: self.bounds.y - 1,
+ },
+ Direction::North,
+ ),
+ ]
+ })
+ .chain((0..self.bounds.y).flat_map(|y| {
+ vec![
+ (Point { x: 0, y }, Direction::East),
+ (
+ Point {
+ x: self.bounds.x - 1,
+ y,
+ },
+ Direction::West,
+ ),
+ ]
+ }))
+ .map(|(start_p, start_d)| {
+ let mut energizer = self.clone();
+ energizer.energize(start_p, start_d);
+ energizer.count_energized_blocks()
+ })
+ .max()
+ .unwrap()
+ }
+}
+
+impl LightBlock {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(LightBlock::MirrorForwardLeaning, tag("/")),
+ value(LightBlock::MirrorBackwardsLeaning, tag("\\")),
+ value(LightBlock::Empty, tag(".")),
+ value(LightBlock::HorizontalSplitter, tag("-")),
+ value(LightBlock::VerticalSplitter, tag("|")),
+ ))(input)
+ }
+}
+
+impl Point {
+ fn up(&self) -> Point {
+ Point {
+ x: self.x,
+ y: self.y - 1,
+ }
+ }
+
+ fn down(&self) -> Point {
+ Point {
+ x: self.x,
+ y: self.y + 1,
+ }
+ }
+
+ fn left(&self) -> Point {
+ Point {
+ x: self.x - 1,
+ y: self.y,
+ }
+ }
+
+ fn right(&self) -> Point {
+ Point {
+ x: self.x + 1,
+ y: self.y,
+ }
+ }
+
+ fn go(&self, dir: Direction, bounds: &Point) -> Option<Point> {
+ match dir {
+ Direction::North if self.y > 0 => Some(self.up()),
+ Direction::South if self.y < bounds.y - 1 => Some(self.down()),
+ Direction::West if self.x > 0 => Some(self.left()),
+ Direction::East if self.x < bounds.x - 1 => Some(self.right()),
+ _ => None,
+ }
+ }
+}
diff --git a/2023/src/bin/day_17.rs b/2023/src/bin/day_17.rs
new file mode 100644
index 0000000..8d839e5
--- /dev/null
+++ b/2023/src/bin/day_17.rs
@@ -0,0 +1,144 @@
+use nalgebra::{DMatrix, Point2, Unit, Vector2};
+use nom::{
+ character::complete::{line_ending, satisfy},
+ combinator::{map, map_res},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::{collections::HashSet, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_17.txt")?;
+ let parsed = HeatlossMap::parser(&input).unwrap().1;
+ dbg!(&parsed.find_shortest_heatloss_path(false));
+ dbg!(&parsed.find_shortest_heatloss_path(true));
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct HeatlossMap(DMatrix<u32>);
+
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+struct Position {
+ pos: Point2<isize>,
+ facing: Unit<Vector2<isize>>,
+ duration_with_facing: usize,
+}
+
+impl HeatlossMap {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(
+ line_ending,
+ many1(map_res(satisfy(|c| c.is_digit(10)), |d| {
+ d.to_string().parse::<u32>()
+ })),
+ ),
+ |digits_vec| {
+ HeatlossMap(DMatrix::from_iterator(
+ digits_vec.len(),
+ digits_vec[0].len(),
+ digits_vec.into_iter().flat_map(|row| row.into_iter()),
+ ))
+ },
+ )(input)
+ }
+
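+ // Dijkstra-style search over (position, facing, straight-run length) states.
+ // Normal crucibles may move at most 3 blocks straight; ultra crucibles must
+ // go at least 4 before turning or stopping and at most 10 in a row. The
+ // frontier is re-sorted each iteration so the cheapest state is popped next.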
+ fn find_shortest_heatloss_path(&self, is_ultra_crucible: bool) -> u32 {
+ let start = Position {
+ pos: Point2::new(0, 0),
+ facing: Vector2::x_axis(),
+ duration_with_facing: 0,
+ };
+ let end_pos = self.end();
+
+ let mut frontier = vec![(0, start.clone())];
+ let mut visited = HashSet::new();
+ visited.insert(start);
+
+ let mut distance_to_end = None;
+ while distance_to_end.is_none() && frontier.len() > 0 {
+ // shortest distance is now at the end
+ frontier
+ .sort_unstable_by(|(distance_a, _), (distance_b, _)| distance_b.cmp(distance_a));
+ let (front_distance, front_position) = frontier.pop().unwrap();
+
+ let mut next_points: Vec<Position> = Vec::new();
+
+ if !is_ultra_crucible || front_position.duration_with_facing >= 4 {
+ let facing_l = rotate_left(&front_position.facing);
+ let pos_l = front_position.pos + *facing_l;
+ next_points.push(Position {
+ pos: pos_l,
+ facing: facing_l,
+ duration_with_facing: 1,
+ });
+
+ let facing_r = rotate_right(&front_position.facing);
+ let pos_r = front_position.pos + *facing_r;
+ next_points.push(Position {
+ pos: pos_r,
+ facing: facing_r,
+ duration_with_facing: 1,
+ });
+ }
+
+ if (is_ultra_crucible && front_position.duration_with_facing < 10)
+ || (!is_ultra_crucible && front_position.duration_with_facing < 3)
+ {
+ let pos = front_position.pos + *front_position.facing;
+ next_points.push(Position {
+ pos,
+ facing: front_position.facing.clone(),
+ duration_with_facing: front_position.duration_with_facing + 1,
+ });
+ }
+
+ for next_point in next_points {
+ if let Some(step_distance) = self.get(&next_point.pos) {
+ if !visited.contains(&next_point) {
+ visited.insert(next_point.clone());
+
+ let distance = front_distance + step_distance;
+
+ if next_point.pos == end_pos {
+ if is_ultra_crucible {
+ if next_point.duration_with_facing >= 4 {
+ distance_to_end = Some(distance);
+ }
+ } else {
+ distance_to_end = Some(distance);
+ }
+ }
+ frontier.push((distance, next_point));
+ }
+ }
+ }
+ }
+
+ distance_to_end.unwrap()
+ }
+
+ fn end(&self) -> Point2<isize> {
+ Point2::new(self.0.ncols() as isize - 1, self.0.nrows() as isize - 1)
+ }
+
+ fn get(&self, pos: &Point2<isize>) -> Option<u32> {
+ let x: Option<usize> = pos.x.try_into().ok();
+ let y: Option<usize> = pos.y.try_into().ok();
+
+ match (x, y) {
+ (Some(x), Some(y)) => self.0.get((x, y)).copied(),
+ _ => None,
+ }
+ }
+}
+
+fn rotate_left(facing: &Unit<Vector2<isize>>) -> Unit<Vector2<isize>> {
+ Unit::new_unchecked(Vector2::new(-facing.y, facing.x))
+}
+
+fn rotate_right(facing: &Unit<Vector2<isize>>) -> Unit<Vector2<isize>> {
+ Unit::new_unchecked(Vector2::new(facing.y, -facing.x))
+}
diff --git a/2023/src/bin/day_18.rs b/2023/src/bin/day_18.rs
new file mode 100644
index 0000000..a4c9355
--- /dev/null
+++ b/2023/src/bin/day_18.rs
@@ -0,0 +1,214 @@
+use nalgebra::{Point2, Vector2};
+use nom::{
+ branch::alt,
+ bytes::complete::{tag, take},
+ character::complete::{char, hex_digit1, line_ending, space1, u32},
+ combinator::{map, map_res, value},
+ multi::separated_list1,
+ sequence::tuple,
+ IResult,
+};
+use std::{collections::HashMap, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_18.txt")?;
+ let parsed = Instructions::parser(&input).unwrap().1;
+ let mut fill_map = parsed.draw_map();
+ fill_map.derive_inside_outside();
+ dbg!(&fill_map.count_inside());
+ dbg!(&parsed.find_internal_area());
+
+ let hex_parsed = Instructions::hex_parser(&input).unwrap().1;
+ dbg!(&hex_parsed.find_internal_area());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Instructions(Vec<Instruction>);
+
+#[derive(Debug)]
+struct Instruction {
+ direction: Vector2<i64>,
+ distance: u32,
+}
+
+#[derive(Debug)]
+struct FloodFillMap {
+ map: HashMap<Point2<i64>, Fill>,
+ top_left: Point2<i64>,
+ bottom_right: Point2<i64>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum Fill {
+ Outside,
+ Inside,
+}
+
+impl Instructions {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, Instruction::parser),
+ Instructions,
+ )(input)
+ }
+
+ fn hex_parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, Instruction::hex_parser),
+ Instructions,
+ )(input)
+ }
+
+ fn draw_map(&self) -> FloodFillMap {
+ let mut trench = HashMap::new();
+ let mut current = Point2::new(0, 0);
+ for instruction in &self.0 {
+ for _ in 0..instruction.distance {
+ current += instruction.direction;
+ trench.insert(current.clone(), Fill::Inside);
+ }
+ }
+
+ FloodFillMap::new(trench)
+ }
+
+ fn find_internal_area(&self) -> i64 {
+ let mut current_point = Point2::new(0, 0);
+ let mut points = vec![current_point];
+
+ let mut perimeter = 0;
+ for instruction in &self.0 {
+ let next_point = current_point + instruction.direction * instruction.distance as i64;
+ points.push(next_point);
+ current_point = next_point;
+ perimeter += instruction.distance as i64;
+ }
+
+ let mut area = 0;
+ for point in points.windows(2) {
+ if let &[p1, p2] = point {
+ area += p1.x * p2.y;
+ area -= p1.y * p2.x;
+ } else {
+ unreachable!()
+ }
+ }
+
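+ // `area` accumulates twice the shoelace area; with B the perimeter length,
+ // Pick's theorem gives interior + boundary cells = A + B/2 + 1,
+ // i.e. (2A + B + 2) / 2.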
+ (perimeter + 2 + area) / 2
+ }
+}
+
+impl Instruction {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ dir_parser,
+ space1,
+ u32,
+ space1,
+ tag("(#"),
+ hex_digit1,
+ tag(")"),
+ )),
+ |(direction, _, distance, _, _, _, _)| Instruction {
+ direction,
+ distance,
+ },
+ )(input)
+ }
+
+ fn hex_parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ dir_parser,
+ space1,
+ u32,
+ space1,
+ tag("(#"),
+ map_res(take(5usize), |hex| u32::from_str_radix(hex, 16)),
+ hex_dir_parser,
+ tag(")"),
+ )),
+ |(_, _, _, _, _, distance, direction, _)| Instruction {
+ direction,
+ distance,
+ },
+ )(input)
+ }
+}
+
+fn dir_parser(input: &str) -> IResult<&str, Vector2<i64>> {
+ alt((
+ value(Vector2::new(0, -1), char('U')),
+ value(Vector2::new(0, 1), char('D')),
+ value(Vector2::new(-1, 0), char('L')),
+ value(Vector2::new(1, 0), char('R')),
+ ))(input)
+}
+
+fn hex_dir_parser(input: &str) -> IResult<&str, Vector2<i64>> {
+ alt((
+ value(Vector2::new(0, -1), char('3')),
+ value(Vector2::new(0, 1), char('1')),
+ value(Vector2::new(-1, 0), char('2')),
+ value(Vector2::new(1, 0), char('0')),
+ ))(input)
+}
+
+impl FloodFillMap {
+ fn new(map: HashMap<Point2<i64>, Fill>) -> FloodFillMap {
+ let top_left = Point2::new(
+ map.keys().map(|key| key.x).min().unwrap() - 1,
+ map.keys().map(|key| key.y).min().unwrap() - 1,
+ );
+
+ let bottom_right = Point2::new(
+ map.keys().map(|key| key.x).max().unwrap() + 1,
+ map.keys().map(|key| key.y).max().unwrap() + 1,
+ );
+
+ FloodFillMap {
+ map,
+ top_left,
+ bottom_right,
+ }
+ }
+
+ fn derive_inside_outside(&mut self) {
+ self.flood_fill(self.top_left.clone(), &Fill::Outside);
+ for y in self.top_left.y..=self.bottom_right.y {
+ for x in self.top_left.x..=self.bottom_right.x {
+ let current = Point2::new(x, y);
+ self.map.entry(current).or_insert(Fill::Inside);
+ }
+ }
+ }
+
+ fn count_inside(&self) -> usize {
+ self.map
+ .values()
+ .filter(|fill| **fill == Fill::Inside)
+ .count()
+ }
+
+ fn flood_fill(&mut self, start: Point2<i64>, fill: &Fill) {
+ let mut to_fill = vec![start];
+
+ while let Some(next) = to_fill.pop() {
+ if next.y >= self.top_left.y
+ && next.x >= self.top_left.x
+ && next.y <= self.bottom_right.y
+ && next.x <= self.bottom_right.x
+ && !self.map.contains_key(&next)
+ {
+ self.map.insert(next.clone(), fill.clone());
+ to_fill.push(next + Vector2::new(-1, 0));
+ to_fill.push(next + Vector2::new(1, 0));
+ to_fill.push(next + Vector2::new(0, -1));
+ to_fill.push(next + Vector2::new(0, 1));
+ }
+ }
+ }
+}
diff --git a/2023/src/bin/day_19.rs b/2023/src/bin/day_19.rs
new file mode 100644
index 0000000..8a7dabd
--- /dev/null
+++ b/2023/src/bin/day_19.rs
@@ -0,0 +1,348 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{alpha1, line_ending, u32 as nom_u32},
+ combinator::{map, value},
+ multi::separated_list1,
+ sequence::{pair, preceded, terminated, tuple},
+ IResult,
+};
+use std::{collections::BTreeMap, fs, ops::Range};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_19.txt")?;
+ let parsed = PartSortingMess::parser(&input).unwrap().1;
+ dbg!(&parsed.accepted_part_ratings());
+ dbg!(&parsed.count_distinct_combinations());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct PartSortingMess {
+ workflows: BTreeMap<String, Workflow>,
+ parts: Vec<Part>,
+}
+
+#[derive(Debug)]
+struct Workflow {
+ id: String,
+ conditions: Vec<WorkflowStep>,
+ if_none_match: WorkflowOutcome,
+}
+
+#[derive(Debug)]
+struct WorkflowStep {
+ field: PartField,
+ condition: WorkflowCondition,
+ result: WorkflowOutcome,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum PartField {
+ X,
+ M,
+ A,
+ S,
+}
+
+#[derive(Debug)]
+enum WorkflowCondition {
+ LessThan(u32),
+ GreaterThan(u32),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum WorkflowOutcome {
+ Accept,
+ Reject,
+ Defer(String),
+}
+
+#[derive(Debug)]
+struct Part {
+ x: u32,
+ m: u32,
+ a: u32,
+ s: u32,
+}
+
+#[derive(Debug, Clone)]
+struct PartRange {
+ x: Range<u32>,
+ m: Range<u32>,
+ a: Range<u32>,
+ s: Range<u32>,
+}
+
+impl PartSortingMess {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ separated_list1(line_ending, Workflow::parser),
+ line_ending,
+ line_ending,
+ separated_list1(line_ending, Part::parser),
+ )),
+ |(workflows, _, _, parts)| PartSortingMess {
+ workflows: workflows
+ .into_iter()
+ .map(|workflow| (workflow.id.clone(), workflow))
+ .collect(),
+ parts,
+ },
+ )(input)
+ }
+
+ fn accepted_part_ratings(&self) -> u32 {
+ let mut rating_sum = 0;
+
+ for part in &self.parts {
+ let mut outcome = WorkflowOutcome::Defer("in".into());
+ while let Some(workflow_id) = outcome.next_workflow_id() {
+ let workflow = self.workflows.get(&workflow_id).unwrap();
+ outcome = workflow.process(part);
+ }
+
+ if outcome == WorkflowOutcome::Accept {
+ rating_sum += part.rating();
+ }
+ }
+
+ rating_sum
+ }
+
+ fn count_distinct_combinations(&self) -> u64 {
+ self.count_combinations_for_workflow(
+ "in",
+ &PartRange {
+ x: 1..4001,
+ m: 1..4001,
+ a: 1..4001,
+ s: 1..4001,
+ },
+ )
+
+ // potential workflow optimizations to make the stack not so deep
+ // - eliminate conditions in a workflow where everything has the same outcome
+ // - inline workflows with a single outcome
+ // - eliminate unreachable branches like a<10, a<5
+ }
+
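+ // Recursively push a 4-dimensional range of ratings through the workflows:
+ // each rule splits the range into a matched part (which follows the rule's
+ // outcome) and a remainder (which falls through to the next rule); accepted
+ // ranges contribute the product of their side lengths.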
+ fn count_combinations_for_workflow(&self, workflow_id: &str, part_range: &PartRange) -> u64 {
+ if part_range.combinations() == 0 {
+ 0
+ } else {
+ let mut combinations = 0;
+
+ let workflow = self.workflows.get(workflow_id).unwrap();
+ let mut remaining_range = part_range.clone();
+ for condition in &workflow.conditions {
+ let matched_range: PartRange;
+ (matched_range, remaining_range) =
+ remaining_range.partition(&condition.field, &condition.condition);
+ combinations += match &condition.result {
+ WorkflowOutcome::Accept => matched_range.combinations(),
+ WorkflowOutcome::Reject => 0,
+ WorkflowOutcome::Defer(next_workflow_id) => {
+ self.count_combinations_for_workflow(&next_workflow_id, &matched_range)
+ }
+ };
+ }
+ combinations += match &workflow.if_none_match {
+ WorkflowOutcome::Accept => remaining_range.combinations(),
+ WorkflowOutcome::Reject => 0,
+ WorkflowOutcome::Defer(next_workflow_id) => {
+ self.count_combinations_for_workflow(&next_workflow_id, &remaining_range)
+ }
+ };
+
+ combinations
+ }
+ }
+}
+
+impl Workflow {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ pair(
+ alpha1,
+ preceded(
+ tag("{"),
+ terminated(
+ pair(
+ separated_list1(tag(","), WorkflowStep::parser),
+ preceded(tag(","), WorkflowOutcome::parser),
+ ),
+ tag("}"),
+ ),
+ ),
+ ),
+ |(id, (conditions, if_none_match)): (&str, _)| Workflow {
+ id: id.to_owned(),
+ conditions,
+ if_none_match,
+ },
+ )(input)
+ }
+
+ fn process(&self, part: &Part) -> WorkflowOutcome {
+ for condition in &self.conditions {
+ let val = part.get(&condition.field);
+ if condition.condition.matches(val) {
+ return condition.result.clone();
+ }
+ }
+ self.if_none_match.clone()
+ }
+}
+
+impl WorkflowStep {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ PartField::parser,
+ WorkflowCondition::parser,
+ preceded(tag(":"), WorkflowOutcome::parser),
+ )),
+ |(field, condition, result)| WorkflowStep {
+ field,
+ condition,
+ result,
+ },
+ )(input)
+ }
+}
+
+impl PartField {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(PartField::X, tag("x")),
+ value(PartField::M, tag("m")),
+ value(PartField::A, tag("a")),
+ value(PartField::S, tag("s")),
+ ))(input)
+ }
+}
+
+impl WorkflowCondition {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ map(preceded(tag("<"), nom_u32), WorkflowCondition::LessThan),
+ map(preceded(tag(">"), nom_u32), WorkflowCondition::GreaterThan),
+ ))(input)
+ }
+
+ fn matches(&self, val: u32) -> bool {
+ match self {
+ WorkflowCondition::LessThan(me) => val < *me,
+ WorkflowCondition::GreaterThan(me) => val > *me,
+ }
+ }
+
+ fn partition(&self, val: &Range<u32>) -> (Range<u32>, Range<u32>) {
+ match self {
+ WorkflowCondition::LessThan(me) => (val.start..*me, *me..val.end),
+ WorkflowCondition::GreaterThan(me) => (*me + 1..val.end, val.start..*me + 1),
+ }
+ }
+}
+
+impl WorkflowOutcome {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(WorkflowOutcome::Accept, tag("A")),
+ value(WorkflowOutcome::Reject, tag("R")),
+ map(alpha1, |id: &str| WorkflowOutcome::Defer(id.to_owned())),
+ ))(input)
+ }
+
+ fn next_workflow_id(&self) -> Option<String> {
+ match self {
+ WorkflowOutcome::Accept | WorkflowOutcome::Reject => None,
+ WorkflowOutcome::Defer(id) => Some(id.clone()),
+ }
+ }
+}
+
+impl Part {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ tag("{x="),
+ nom_u32,
+ tag(",m="),
+ nom_u32,
+ tag(",a="),
+ nom_u32,
+ tag(",s="),
+ nom_u32,
+ tag("}"),
+ )),
+ |(_, x, _, m, _, a, _, s, _)| Part { x, m, a, s },
+ )(input)
+ }
+
+ fn rating(&self) -> u32 {
+ self.x + self.m + self.a + self.s
+ }
+
+ fn get(&self, field: &PartField) -> u32 {
+ match field {
+ PartField::X => self.x,
+ PartField::M => self.m,
+ PartField::A => self.a,
+ PartField::S => self.s,
+ }
+ }
+}
+
+impl PartRange {
+ fn combinations(&self) -> u64 {
+ if self.x.is_empty() || self.m.is_empty() || self.a.is_empty() || self.s.is_empty() {
+ 0
+ } else {
+ (self.x.end - self.x.start) as u64
+ * (self.m.end - self.m.start) as u64
+ * (self.a.end - self.a.start) as u64
+ * (self.s.end - self.s.start) as u64
+ }
+ }
+
+ fn partition(
+ &self,
+ field: &PartField,
+ condition: &WorkflowCondition,
+ ) -> (PartRange, PartRange) {
+ let (matched_range, unmatched_range) = match field {
+ PartField::X => condition.partition(&self.x),
+ PartField::M => condition.partition(&self.m),
+ PartField::A => condition.partition(&self.a),
+ PartField::S => condition.partition(&self.s),
+ };
+
+ let mut matched_part_range = self.clone();
+ let mut unmatched_part_range = self.clone();
+
+ match field {
+ PartField::X => {
+ matched_part_range.x = matched_range;
+ unmatched_part_range.x = unmatched_range;
+ }
+ PartField::M => {
+ matched_part_range.m = matched_range;
+ unmatched_part_range.m = unmatched_range;
+ }
+ PartField::A => {
+ matched_part_range.a = matched_range;
+ unmatched_part_range.a = unmatched_range;
+ }
+ PartField::S => {
+ matched_part_range.s = matched_range;
+ unmatched_part_range.s = unmatched_range;
+ }
+ };
+
+ (matched_part_range, unmatched_part_range)
+ }
+}
diff --git a/2023/src/bin/day_2.rs b/2023/src/bin/day_2.rs
new file mode 100644
index 0000000..6f19c1d
--- /dev/null
+++ b/2023/src/bin/day_2.rs
@@ -0,0 +1,109 @@
+use nom::{
+ bytes::complete::tag,
+ character::complete::{alpha1, line_ending, u32 as nom_u32},
+ combinator::map,
+ multi::separated_list1,
+ sequence::tuple,
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_2.txt")?;
+ let parsed = Games::parser(&input).unwrap().1;
+ let max_pull = Pull {
+ red: 12,
+ green: 13,
+ blue: 14,
+ };
+ dbg!(&parsed.valid_game_id_sum(&max_pull));
+ dbg!(&parsed.power_sum());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Games(Vec<Game>);
+
+#[derive(Debug)]
+struct Game {
+ id: u32,
+ pulls: Vec<Pull>,
+}
+
+#[derive(Debug, Default)]
+struct Pull {
+ red: u32,
+ green: u32,
+ blue: u32,
+}
+
+impl Games {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, Game::parser), Games)(input)
+ }
+
+ fn valid_game_id_sum(&self, max_pull: &Pull) -> u32 {
+ self.0
+ .iter()
+ .filter(|game| game.is_valid(max_pull))
+ .map(|game| game.id)
+ .sum()
+ }
+
+ fn power_sum(&self) -> u32 {
+ self.0.iter().map(|game| game.min_pull().power()).sum()
+ }
+}
+
+impl Game {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ tag("Game "),
+ nom_u32,
+ tag(": "),
+ separated_list1(tag("; "), Pull::parser),
+ )),
+ |(_, id, _, pulls)| Game { id, pulls },
+ )(input)
+ }
+
+ fn is_valid(&self, max_pull: &Pull) -> bool {
+ self.pulls.iter().all(|pull| {
+ pull.red <= max_pull.red && pull.blue <= max_pull.blue && pull.green <= max_pull.green
+ })
+ }
+
+ fn min_pull(&self) -> Pull {
+ Pull {
+ red: self.pulls.iter().map(|p| p.red).max().unwrap_or(0),
+ blue: self.pulls.iter().map(|p| p.blue).max().unwrap_or(0),
+ green: self.pulls.iter().map(|p| p.green).max().unwrap_or(0),
+ }
+ }
+}
+
+impl Pull {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(tag(", "), tuple((nom_u32, tag(" "), alpha1))),
+ |stones| {
+ let mut pull = Pull::default();
+ for (quantity, _, colour) in stones {
+ match colour {
+ "red" => pull.red += quantity,
+ "blue" => pull.blue += quantity,
+ "green" => pull.green += quantity,
+ other => panic!("Unexpected colour, {}", other),
+ };
+ }
+ pull
+ },
+ )(input)
+ }
+
+ fn power(&self) -> u32 {
+ self.red * self.blue * self.green
+ }
+}
diff --git a/2023/src/bin/day_20.rs b/2023/src/bin/day_20.rs
new file mode 100644
index 0000000..f2e955f
--- /dev/null
+++ b/2023/src/bin/day_20.rs
@@ -0,0 +1,334 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{alpha1, line_ending},
+ combinator::{map, map_res, value},
+ multi::separated_list1,
+ sequence::{delimited, pair, separated_pair},
+ IResult,
+};
+use std::{collections::VecDeque, fs, num::ParseIntError};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_20.txt")?;
+ let circuit = Circuit::parser(&input).unwrap().1;
+
+ {
+ let mut circuit = circuit.clone();
+ let mut pulse_tracker = PulseCounter::default();
+
+ for _ in 0..1000 {
+ circuit.push_the_button(&mut pulse_tracker);
+ }
+ dbg!(pulse_tracker.low_pulse_count * pulse_tracker.high_pulse_count);
+ }
+
+ {
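+ // Part 2: rx is fed by a single conjunction ("th" in this particular
+ // input). Log the button-press index whenever one of its inputs goes
+ // high; rx only receives a low pulse when all of them line up, so the
+ // logged cycle lengths give the answer (their least common multiple,
+ // worked out from the printed output).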
+ let mut circuit = circuit.clone();
+ let mut pulse_tracker = RxWatcher::new(
+ circuit
+ .modules
+ .iter()
+ .find(|m| m.id == ModuleId::rx())
+ .unwrap()
+ .index,
+ circuit
+ .modules
+ .iter()
+ .find(|m| m.id == ModuleId::from_short_alphanumeric("th").unwrap())
+ .unwrap()
+ .index,
+ );
+
+ while !pulse_tracker.rx_got_a_low_pulse {
+ pulse_tracker.i += 1;
+ circuit.push_the_button(&mut pulse_tracker);
+ }
+ dbg!(pulse_tracker.i, pulse_tracker);
+ }
+
+ Ok(())
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+struct ModuleId(u32);
+
+#[derive(Debug, Clone)]
+struct Circuit {
+ modules: Vec<Module>,
+}
+
+#[derive(Debug, Clone)]
+struct ParsedModule {
+ id: ModuleId,
+ outs: Vec<ModuleId>,
+ state: ModuleState,
+}
+
+#[derive(Debug, Clone)]
+struct Module {
+ id: ModuleId,
+ index: usize,
+ outs: Vec<usize>,
+ state: ModuleState,
+}
+
+#[derive(Debug, Clone)]
+enum ModuleState {
+ FlipFlop(bool),
+ Conjunction(u64),
+ Sink,
+}
+
+#[derive(Debug, Default)]
+struct PulseCounter {
+ pulses: VecDeque<Pulse>,
+ low_pulse_count: u64,
+ high_pulse_count: u64,
+}
+
+#[derive(Debug)]
+struct RxWatcher {
+ pulses: VecDeque<Pulse>,
+ rx_module_index: usize,
+ rx_got_a_low_pulse: bool,
+ logging_module_index: usize,
+ i: usize,
+}
+
+impl RxWatcher {
+ fn new(rx_module_index: usize, logging_module_index: usize) -> Self {
+ RxWatcher {
+ pulses: VecDeque::default(),
+ rx_module_index,
+ rx_got_a_low_pulse: false,
+ logging_module_index,
+ i: 0,
+ }
+ }
+}
+
+#[derive(Debug)]
+struct Pulse {
+ state: bool,
+ input: usize,
+ output: usize,
+}
+
+impl Circuit {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ pair(
+ delimited(
+ tag("broadcaster -> "),
+ ParsedModule::outs_parser,
+ line_ending,
+ ),
+ separated_list1(line_ending, ParsedModule::parser),
+ ),
+ |(broadcaster, parsed_modules)| {
+ let sink_index = parsed_modules.len() + 1;
+ let rx_sink_index = parsed_modules.len() + 2;
+ let mut modules: Vec<Module> = parsed_modules
+ .iter()
+ .enumerate()
+ .map(|(index, module)| Module {
+ id: module.id,
+ index: index + 1,
+ outs: module
+ .outs
+ .iter()
+ .map(|out_id| {
+ parsed_modules
+ .iter()
+ .enumerate()
+ .find(|(_, out)| &out.id == out_id)
+ .map_or_else(
+ || {
+ if out_id == &ModuleId::rx() {
+ rx_sink_index
+ } else {
+ sink_index
+ }
+ },
+ |(index, _)| index + 1,
+ )
+ })
+ .collect(),
+ state: module.state.clone(),
+ })
+ .collect();
+ modules.push(Module {
+ id: ModuleId::sink(),
+ index: sink_index,
+ outs: Vec::new(),
+ state: ModuleState::Sink,
+ });
+ modules.push(Module {
+ id: ModuleId::rx(),
+ index: rx_sink_index,
+ outs: Vec::new(),
+ state: ModuleState::Sink,
+ });
+ modules.insert(
+ 0,
+ Module {
+ id: ModuleId::broadcaster(),
+ index: 0,
+ outs: broadcaster
+ .iter()
+ .map(|mid| modules.iter().find(|m| &m.id == mid).unwrap().index)
+ .collect(),
+ state: ModuleState::Sink,
+ },
+ );
+
+ let modules_snapshot = modules.clone();
+
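+ // A conjunction's state is a u64 bitmask of the last pulse seen from each
+ // module index. It starts as all ones, and here the bit of every actual
+ // input is cleared, so the module reads "all inputs high" (state == u64::MAX)
+ // only once each connected input has really sent a high pulse.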
+ for module in &mut modules {
+ if let ModuleState::Conjunction(ref mut conjunction_state) = module.state {
+ for input in &modules_snapshot {
+ if input.outs.contains(&module.index) {
+ let mask = !(1 << input.index);
+ *conjunction_state &= mask;
+ }
+ }
+ }
+ }
+
+ Circuit { modules }
+ },
+ )(input)
+ }
+
+ fn push_the_button(&mut self, pulse_tracker: &mut impl PulseTracker) {
+ pulse_tracker.button_pulse();
+ for b in &self.modules[0].outs {
+ pulse_tracker.push(Pulse {
+ state: false,
+ input: 0,
+ output: *b,
+ });
+ }
+
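+ // Drain the pulse queue in FIFO order: a flip-flop toggles and emits only
+ // on a low pulse, while a conjunction remembers the incoming pulse and
+ // emits low exactly when every remembered input is high (state == u64::MAX).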
+ while let Some(pulse) = pulse_tracker.pop() {
+ let module = &mut self.modules[pulse.output];
+ let new_pulse_state: Option<bool> = match module.state {
+ ModuleState::FlipFlop(ref mut current) => {
+ if !pulse.state {
+ *current = !*current;
+ Some(*current)
+ } else {
+ None
+ }
+ }
+ ModuleState::Conjunction(ref mut current) => {
+ let mask = 1 << pulse.input;
+ if pulse.state {
+ *current |= mask;
+ } else {
+ *current &= !mask;
+ }
+ Some(*current != u64::MAX)
+ }
+ ModuleState::Sink => None,
+ };
+ if let Some(new_pulse_state) = new_pulse_state {
+ for out in &module.outs {
+ pulse_tracker.push(Pulse {
+ state: new_pulse_state,
+ input: module.index,
+ output: *out,
+ });
+ }
+ }
+ }
+ }
+}
+
+trait PulseTracker {
+ fn button_pulse(&mut self);
+ fn push(&mut self, pulse: Pulse);
+ fn pop(&mut self) -> Option<Pulse>;
+}
+
+impl PulseTracker for PulseCounter {
+ fn button_pulse(&mut self) {
+ self.low_pulse_count += 1;
+ }
+
+ fn push(&mut self, pulse: Pulse) {
+ if pulse.state {
+ self.high_pulse_count += 1;
+ } else {
+ self.low_pulse_count += 1;
+ }
+ self.pulses.push_back(pulse);
+ }
+
+ fn pop(&mut self) -> Option<Pulse> {
+ self.pulses.pop_front()
+ }
+}
+
+impl PulseTracker for RxWatcher {
+ fn button_pulse(&mut self) {}
+
+ fn push(&mut self, pulse: Pulse) {
+ if pulse.state && pulse.output == self.logging_module_index {
+ println!("{}: {} into {}", self.i, pulse.input, pulse.output);
+ }
+ if !pulse.state && pulse.output == self.rx_module_index {
+ self.rx_got_a_low_pulse = true;
+ }
+ self.pulses.push_back(pulse);
+ }
+
+ fn pop(&mut self) -> Option<Pulse> {
+ self.pulses.pop_front()
+ }
+}
+
+impl ParsedModule {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_pair(
+ pair(
+ alt((
+ value(ModuleState::FlipFlop(false), tag("%")),
+ value(ModuleState::Conjunction(u64::MAX), tag("&")),
+ )),
+ map_res(alpha1, |id: &str| ModuleId::from_short_alphanumeric(id)),
+ ),
+ tag(" -> "),
+ ParsedModule::outs_parser,
+ ),
+ |((state, id), outs)| ParsedModule { id, state, outs },
+ )(input)
+ }
+
+ fn outs_parser(input: &str) -> IResult<&str, Vec<ModuleId>> {
+ separated_list1(
+ tag(", "),
+ map_res(alpha1, |s: &str| ModuleId::from_short_alphanumeric(s)),
+ )(input)
+ }
+}
+
+impl ModuleId {
+ fn broadcaster() -> ModuleId {
+ ModuleId(0)
+ }
+
+ fn sink() -> ModuleId {
+ ModuleId(u32::MAX)
+ }
+
+ fn rx() -> ModuleId {
+ ModuleId::from_short_alphanumeric("rx").unwrap()
+ }
+
+ fn from_short_alphanumeric(s: &str) -> Result<ModuleId, ParseIntError> {
+ u32::from_str_radix(s, 36).map(ModuleId)
+ }
+}
diff --git a/2023/src/bin/day_21.rs b/2023/src/bin/day_21.rs
new file mode 100644
index 0000000..9ef273e
--- /dev/null
+++ b/2023/src/bin/day_21.rs
@@ -0,0 +1,395 @@
+use nom::{
+ branch::alt,
+ character::complete::{char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::{collections::BTreeSet, fs, mem};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_21.txt")?;
+ let garden = WalledGarden::parser(&input).unwrap().1;
+
+ dbg!(garden.count_closed_walls_walkable_after_steps(garden.center(), 64));
+ dbg!(garden.count_open_walls_walkable_after_steps(26501365));
+
+ Ok(())
+}
+
+#[derive(Debug, Clone)]
+struct WalledGarden {
+ rocks: Vec<Vec<bool>>,
+ walkable_to: Vec<Vec<bool>>,
+ walkable_to_back: Vec<Vec<bool>>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum WalledGardenState {
+ Empty,
+ Walkable,
+ Rock,
+}
+
+#[derive(Debug)]
+struct MaxWalkable {
+ odd_steps_max: usize,
+ even_steps_max: usize,
+ min_steps: usize,
+}
+
+#[derive(Debug)]
+struct EntryPoint {
+ entry: (usize, usize),
+ max: MaxWalkable,
+}
+
+impl WalledGarden {
+ fn new(tiles: Vec<Vec<WalledGardenState>>) -> WalledGarden {
+ let rocks: Vec<Vec<bool>> = tiles
+ .iter()
+ .map(|row| {
+ row.iter()
+ .map(|t| *t == WalledGardenState::Rock)
+ .collect::<Vec<bool>>()
+ })
+ .collect();
+ let walkable_to: Vec<Vec<bool>> = vec![vec![false; rocks[0].len()]; rocks.len()];
+
+ WalledGarden {
+ rocks,
+ walkable_to_back: walkable_to.clone(),
+ walkable_to: walkable_to.clone(),
+ }
+ }
+
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, many1(WalledGardenState::parser)),
+ WalledGarden::new,
+ )(input)
+ }
+
+ fn count_closed_walls_walkable_after_steps(
+ &self,
+ start: (usize, usize),
+ steps: usize,
+ ) -> usize {
+ let mut garden = self.clone();
+ garden.walkable_to[start.1][start.0] = true;
+ for _ in 0..steps {
+ garden.closed_walls_next_mut();
+ }
+ garden.count_walkable()
+ }
+
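+ /// Floods the garden from `start` until the reachable set stops growing.
+ /// Reachability alternates with step parity, so separate maxima are kept
+ /// for odd and even step counts, along with the number of steps needed to
+ /// saturate the chunk.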
+ fn closed_walls_find_max_walkable(&self, start: (usize, usize)) -> MaxWalkable {
+ let mut odd_steps_max = 0;
+ let mut even_steps_max = 0;
+
+ let mut garden = self.clone();
+ garden.walkable_to[start.1][start.0] = true;
+
+ let mut unchanged_count = 0;
+ for i in 1.. {
+ garden.closed_walls_next_mut();
+
+ if i % 2 == 0 {
+ let new_even_max_countable = garden.count_walkable();
+ if even_steps_max == new_even_max_countable {
+ unchanged_count += 1;
+ } else {
+ even_steps_max = new_even_max_countable;
+ }
+ } else {
+ let new_odd_max_countable = garden.count_walkable();
+ if odd_steps_max == new_odd_max_countable {
+ unchanged_count += 1;
+ } else {
+ odd_steps_max = new_odd_max_countable;
+ }
+ }
+
+ if unchanged_count > 1 {
+ return MaxWalkable {
+ odd_steps_max,
+ even_steps_max,
+ min_steps: i,
+ };
+ }
+ }
+ unreachable!()
+ }
+
+ fn closed_walls_next_mut(&mut self) {
+ for (y, row) in self.walkable_to_back.iter_mut().enumerate() {
+ for (x, tile) in row.iter_mut().enumerate() {
+ if !self.rocks[y][x] {
+ *tile = (y > 0 && self.walkable_to[y - 1][x])
+ || (y < self.walkable_to.len() - 1 && self.walkable_to[y + 1][x])
+ || (x > 0 && self.walkable_to[y][x - 1])
+ || (x < self.walkable_to[y].len() - 1 && self.walkable_to[y][x + 1]);
+ }
+ }
+ }
+
+ mem::swap(&mut self.walkable_to, &mut self.walkable_to_back);
+ }
+
+ fn count_walkable(&self) -> usize {
+ self.walkable_to
+ .iter()
+ .flat_map(|row| row.iter())
+ .filter(|s| **s)
+ .count()
+ }
+
+ fn count_open_walls_walkable_after_steps(&self, steps: usize) -> usize {
+ // assumptions:
+ // - this field is square
+ // - there is a direct path from the starting point up, down, left, and right
+ // - there is a direct path around the edges
+ // - the start is in the center
+
+ let (size_x, size_y) = self.size();
+ let (center_x, center_y) = self.center();
+
+ let max_chunks_deviance = if (steps - center_y) % size_y > 0 {
+ 1 + (steps - center_y) / size_y
+ } else {
+ (steps - center_y) / size_y
+ };
+
+ let mut total_walkable = 0;
+
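+ // Chunks of the infinite grid are counted per direction: partially
+ // reachable chunks near the frontier are simulated directly, while
+ // saturated chunks reuse the precomputed odd/even maxima, since a
+ // saturated chunk alternates between two stable sets of reachable tiles.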
+ for quadrant in [
+ EntryPoint::new((0, 0), &self),
+ EntryPoint::new((size_x - 1, 0), &self),
+ EntryPoint::new((0, size_y - 1), &self),
+ EntryPoint::new((size_x - 1, size_y - 1), &self),
+ ] {
+ let steps_to_quadrant_alignment = center_x + center_y + 2;
+
+ let mut distance_from_edge = 0;
+ while max_chunks_deviance > distance_from_edge {
+ let steps_from_alignment_to_target_chunk =
+ (max_chunks_deviance - distance_from_edge - 1) * size_y;
+ if steps_to_quadrant_alignment + steps_from_alignment_to_target_chunk > steps {
+ distance_from_edge += 1;
+ continue;
+ }
+ let steps_in_chunk =
+ steps - steps_to_quadrant_alignment - steps_from_alignment_to_target_chunk;
+ if steps_in_chunk >= quadrant.max.min_steps {
+ break;
+ }
+
+ let walkable_per_chunk =
+ self.count_closed_walls_walkable_after_steps(quadrant.entry, steps_in_chunk);
+ total_walkable += walkable_per_chunk * (max_chunks_deviance - distance_from_edge);
+ distance_from_edge += 1;
+ }
+
+ let remaining_diagonals = max_chunks_deviance - distance_from_edge;
+ let even_length_diagonals = remaining_diagonals / 2;
+ let odd_length_diagonals = even_length_diagonals + remaining_diagonals % 2;
+
+ let even_length_diagonal_chunks = even_length_diagonals * (even_length_diagonals + 1);
+ let odd_length_diagonal_chunks = odd_length_diagonals.pow(2);
+
+ let odd_diagonal_has_even_steps_left = (steps - steps_to_quadrant_alignment) % 2 == 0;
+ total_walkable += if odd_diagonal_has_even_steps_left {
+ odd_length_diagonal_chunks * quadrant.max.even_steps_max
+ + even_length_diagonal_chunks * quadrant.max.odd_steps_max
+ } else {
+ even_length_diagonal_chunks * quadrant.max.even_steps_max
+ + odd_length_diagonal_chunks * quadrant.max.odd_steps_max
+ };
+ }
+
+ for cardinal in [
+ EntryPoint::new((0, center_y), &self),
+ EntryPoint::new((center_x, 0), &self),
+ EntryPoint::new((size_x - 1, center_y), &self),
+ EntryPoint::new((center_x, size_y - 1), &self),
+ ] {
+ let steps_to_cardinal_alignment = center_y + 1;
+
+ let mut distance_from_edge = 0;
+ while max_chunks_deviance > distance_from_edge {
+ let steps_from_alignment_to_target_chunk =
+ (max_chunks_deviance - distance_from_edge - 1) * size_y;
+ let steps_in_chunk =
+ steps - steps_to_cardinal_alignment - steps_from_alignment_to_target_chunk;
+ if steps_in_chunk >= cardinal.max.min_steps {
+ break;
+ }
+
+ let walkable_per_chunk =
+ self.count_closed_walls_walkable_after_steps(cardinal.entry, steps_in_chunk);
+ total_walkable += walkable_per_chunk;
+ distance_from_edge += 1;
+ }
+
+ let remaining_chunks = max_chunks_deviance - distance_from_edge;
+ let even_index_chunks = remaining_chunks / 2;
+ let odd_index_chunks = even_index_chunks + remaining_chunks % 2;
+
+ let odd_chunk_has_even_steps_left = (steps - steps_to_cardinal_alignment) % 2 == 0;
+ total_walkable += if odd_chunk_has_even_steps_left {
+ odd_index_chunks * cardinal.max.even_steps_max
+ + even_index_chunks * cardinal.max.odd_steps_max
+ } else {
+ even_index_chunks * cardinal.max.even_steps_max
+ + odd_index_chunks * cardinal.max.odd_steps_max
+ };
+ }
+
+ for center in [EntryPoint::new((center_x, center_y), &self)] {
+ total_walkable += if steps >= center.max.min_steps {
+ if steps % 2 == 0 {
+ center.max.even_steps_max
+ } else {
+ center.max.odd_steps_max
+ }
+ } else {
+ self.count_closed_walls_walkable_after_steps(center.entry, steps)
+ };
+ }
+
+ total_walkable
+ }
+
+ fn size(&self) -> (usize, usize) {
+ (self.rocks[0].len(), self.rocks.len())
+ }
+
+ fn center(&self) -> (usize, usize) {
+ let (size_x, size_y) = self.size();
+ (size_x / 2, size_y / 2)
+ }
+}
+
+impl WalledGardenState {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(WalledGardenState::Empty, char('.')),
+ value(WalledGardenState::Walkable, char('S')),
+ value(WalledGardenState::Rock, char('#')),
+ ))(input)
+ }
+}
+
+impl EntryPoint {
+ fn new(entry: (usize, usize), garden: &WalledGarden) -> EntryPoint {
+ EntryPoint {
+ max: garden.closed_walls_find_max_walkable(entry),
+ entry,
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+struct OpenGarden {
+ rockmap_size: (isize, isize),
+ rocks: BTreeSet<(isize, isize)>,
+ walkable: BTreeSet<(isize, isize)>,
+}
+
+impl OpenGarden {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, many1(WalledGardenState::parser)),
+ |walled_garden_map| OpenGarden {
+ rockmap_size: (
+ walled_garden_map.len() as isize,
+ walled_garden_map[0].len() as isize,
+ ),
+ rocks: walled_garden_map
+ .iter()
+ .enumerate()
+ .flat_map(|(y, row)| {
+ row.iter().enumerate().filter_map(move |(x, s)| {
+ (*s == WalledGardenState::Rock).then(|| (y as isize, x as isize))
+ })
+ })
+ .collect(),
+ walkable: walled_garden_map
+ .iter()
+ .enumerate()
+ .flat_map(|(y, row)| {
+ row.iter().enumerate().filter_map(move |(x, s)| {
+ (*s == WalledGardenState::Walkable).then(|| (y as isize, x as isize))
+ })
+ })
+ .collect(),
+ },
+ )(input)
+ }
+
+ fn count_open_walls_walkable_after_steps(&self, steps: usize) -> usize {
+ let mut garden = self.clone();
+ for _ in 0..steps {
+ garden.next_mut();
+ }
+ garden.count_walkable()
+ }
+
+ fn next_mut(&mut self) {
+ let walkable = mem::take(&mut self.walkable);
+ self.walkable = walkable
+ .iter()
+ .flat_map(|(y, x)| [(y - 1, *x), (y + 1, *x), (*y, x - 1), (*y, x + 1)])
+ .filter(|(y, x)| !self.is_rock(*y, *x))
+ .collect();
+ }
+
+ fn is_rock(&self, y: isize, x: isize) -> bool {
+ let y = y.rem_euclid(self.rockmap_size.0);
+ let x = x.rem_euclid(self.rockmap_size.1);
+ self.rocks.contains(&(y, x))
+ }
+
+ fn count_walkable(&self) -> usize {
+ self.walkable.len()
+ }
+}
+
+#[test]
+fn open_matches_optimized_for_small_steps() {
+ let input = fs::read_to_string("inputs/day_21.txt").unwrap();
+ let walled_garden = WalledGarden::parser(&input).unwrap().1;
+ let open_garden = OpenGarden::parser(&input).unwrap().1;
+
+ let steps = 132;
+ assert_eq!(
+ walled_garden.count_open_walls_walkable_after_steps(steps),
+ open_garden.count_open_walls_walkable_after_steps(steps)
+ );
+}
+
+#[test]
+fn open_matches_optimized_for_medium_steps() {
+ let input = fs::read_to_string("inputs/day_21.txt").unwrap();
+ let walled_garden = WalledGarden::parser(&input).unwrap().1;
+ let open_garden = OpenGarden::parser(&input).unwrap().1;
+
+ let steps = 65 + 132;
+ assert_eq!(
+ walled_garden.count_open_walls_walkable_after_steps(steps),
+ open_garden.count_open_walls_walkable_after_steps(steps)
+ );
+}
+
+#[test]
+fn open_matches_optimized_for_bigger_steps() {
+ let input = fs::read_to_string("inputs/day_21.txt").unwrap();
+ let walled_garden = WalledGarden::parser(&input).unwrap().1;
+ let open_garden = OpenGarden::parser(&input).unwrap().1;
+
+ let steps = 270;
+ assert_eq!(
+ walled_garden.count_open_walls_walkable_after_steps(steps),
+ open_garden.count_open_walls_walkable_after_steps(steps)
+ );
+}
diff --git a/2023/src/bin/day_22.rs b/2023/src/bin/day_22.rs
new file mode 100644
index 0000000..750f975
--- /dev/null
+++ b/2023/src/bin/day_22.rs
@@ -0,0 +1,161 @@
+use nalgebra::Point3;
+use nom::{
+ bytes::complete::tag,
+ character::complete::{line_ending, u32},
+ combinator::map,
+ multi::separated_list1,
+ sequence::{separated_pair, tuple},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_22.txt")?;
+ let pile = BrickPile::parser(&input).unwrap().1;
+ let settled_pile = pile.settle();
+ let brickfall_sum = settled_pile.brickfall_sum();
+ dbg!(&brickfall_sum.count_disintegratable_blocks());
+ dbg!(&brickfall_sum.sum_brickfalls());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct BrickPile(Vec<Brick>);
+
+#[derive(Debug)]
+struct SettledBrickPile {
+ bricks: Vec<Brick>,
+ settled_count: usize,
+}
+
+#[derive(Debug, Clone)]
+struct Brick {
+ bottom: Point3<u32>, // always holds the lowest z; top may share the same z for a flat brick.
+ top: Point3<u32>,
+}
+
+#[derive(Debug)]
+struct BrickfallSum(Vec<usize>);
+
+impl BrickPile {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, Brick::parser), BrickPile)(input)
+ }
+
+ fn settle(&self) -> SettledBrickPile {
+ let mut settled_bricks = self.0.clone();
+ let mut has_fallen = vec![false; settled_bricks.len()];
+
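+ // Relax until stable: any brick that is neither on the ground nor
+ // supported by another brick falls one unit per pass. `has_fallen` records
+ // which bricks moved at all, which is what `settled_count` reports.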
+ let mut all_settled = false;
+
+ while !all_settled {
+ all_settled = true;
+ for self_i in 0..settled_bricks.len() {
+ let this_brick_is_resting_on_something = settled_bricks[self_i]
+ .is_resting_on_ground()
+ || (0..settled_bricks.len()).any(|other_i| {
+ self_i != other_i
+ && settled_bricks[self_i].is_resting_on_other(&settled_bricks[other_i])
+ });
+
+ if !this_brick_is_resting_on_something {
+ settled_bricks[self_i].fall_one();
+ has_fallen[self_i] = true;
+ all_settled = false;
+ }
+ }
+ }
+
+ SettledBrickPile {
+ bricks: settled_bricks,
+ settled_count: has_fallen.iter().filter(|f| **f).count(),
+ }
+ }
+}
+
+impl Brick {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_pair(point_parser, tag("~"), point_parser),
+ |(a, b)| {
+ if a.z < b.z {
+ Brick { bottom: a, top: b }
+ } else {
+ Brick { top: a, bottom: b }
+ }
+ },
+ )(input)
+ }
+
+ fn min_x(&self) -> u32 {
+ self.bottom.x.min(self.top.x)
+ }
+
+ fn max_x(&self) -> u32 {
+ self.bottom.x.max(self.top.x)
+ }
+
+ fn min_y(&self) -> u32 {
+ self.bottom.y.min(self.top.y)
+ }
+
+ fn max_y(&self) -> u32 {
+ self.bottom.y.max(self.top.y)
+ }
+
+ fn is_resting_on_ground(&self) -> bool {
+ self.bottom.z == 1
+ }
+
+ fn is_resting_on_other(&self, other: &Self) -> bool {
+ self.bottom.z == other.top.z + 1
+ && self.min_x() <= other.max_x()
+ && other.min_x() <= self.max_x()
+ && self.min_y() <= other.max_y()
+ && other.min_y() <= self.max_y()
+ }
+
+ fn fall_one(&mut self) {
+ self.bottom.z -= 1;
+ self.top.z -= 1;
+ }
+}
+
+fn point_parser(input: &str) -> IResult<&str, Point3<u32>> {
+ map(
+ tuple((u32, tag(","), u32, tag(","), u32)),
+ |(x, _, y, _, z)| Point3::new(x, y, z),
+ )(input)
+}
+
+impl SettledBrickPile {
+ fn brickfall_sum(&self) -> BrickfallSum {
+ BrickfallSum(
+ (0..self.bricks.len())
+ .map(|self_i| {
+ self.count_bricks_that_would_fall_if_this_one_is_disintegrated(self_i)
+ })
+ .collect(),
+ )
+ }
+
+ fn count_bricks_that_would_fall_if_this_one_is_disintegrated(&self, i: usize) -> usize {
+ let mut unsettled_bricks = self.bricks.clone();
+ unsettled_bricks.remove(i);
+ let unsettled_bricks = BrickPile(unsettled_bricks);
+
+ let resettled = unsettled_bricks.settle();
+ resettled.settled_count
+ }
+}
+
+impl BrickfallSum {
+ fn count_disintegratable_blocks(&self) -> usize {
+ self.0.iter().filter(|fallen| **fallen == 0).count()
+ }
+
+ fn sum_brickfalls(&self) -> usize {
+ self.0.iter().sum()
+ }
+}
diff --git a/2023/src/bin/day_23.rs b/2023/src/bin/day_23.rs
new file mode 100644
index 0000000..40d0b70
--- /dev/null
+++ b/2023/src/bin/day_23.rs
@@ -0,0 +1,218 @@
+use nalgebra::Point2;
+use nom::{
+ branch::alt,
+ character::complete::{char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::{
+ collections::{HashMap, HashSet},
+ fs,
+};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_23.txt")?;
+ let forest_map = ForestMap::parser(&input).unwrap().1;
+ let slippery_forked_forest_map = forest_map.build_forked_map(true);
+ let dry_forked_forest_map = forest_map.build_forked_map(false);
+
+ dbg!(&slippery_forked_forest_map.longest_end_path_length());
+ dbg!(&dry_forked_forest_map.longest_end_path_length());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct ForestMap(Vec<Vec<ForestTile>>);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ForestTile {
+ Wall,
+ Open,
+ SlopeUp,
+ SlopeDown,
+ SlopeLeft,
+ SlopeRight,
+}
+
+#[derive(Debug, Clone)]
+struct DecisionNode {
+ explored: HashSet<Point2<usize>>,
+ current: Point2<usize>,
+}
+
+#[derive(Debug)]
+struct ForkedForestMap {
+ start: Point2<usize>,
+ end: Point2<usize>,
+ connections: HashMap<Point2<usize>, Vec<ForkConnection>>,
+}
+
+#[derive(Debug)]
+struct ForkConnection {
+ to: Point2<usize>,
+ distance: usize,
+}
+
+#[derive(Debug, Clone)]
+struct ForkedDecisionNode {
+ explored_forks: HashSet<Point2<usize>>,
+ current: Point2<usize>,
+ current_len: usize,
+}
+
+impl ForestMap {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, many1(ForestTile::parser)),
+ ForestMap,
+ )(input)
+ }
+
+ fn adjacent(
+ &self,
+ p: &Point2<usize>,
+ not_these: &HashSet<Point2<usize>>,
+ slippery: bool,
+ ) -> Vec<Point2<usize>> {
+ let mut adjacent = Vec::with_capacity(4);
+ let tile = self.at(p);
+
+ if p.x > 0 && (!slippery || matches!(tile, ForestTile::Open | ForestTile::SlopeLeft)) {
+ adjacent.push(Point2::new(p.x - 1, p.y));
+ }
+ if p.y > 0 && (!slippery || matches!(tile, ForestTile::Open | ForestTile::SlopeUp)) {
+ adjacent.push(Point2::new(p.x, p.y - 1));
+ }
+ if p.x < self.0[p.y].len() - 1
+ && (!slippery || matches!(tile, ForestTile::Open | ForestTile::SlopeRight))
+ {
+ adjacent.push(Point2::new(p.x + 1, p.y));
+ }
+ if p.y < self.0.len() - 1
+ && (!slippery || matches!(tile, ForestTile::Open | ForestTile::SlopeDown))
+ {
+ adjacent.push(Point2::new(p.x, p.y + 1));
+ }
+
+ adjacent.retain(|adj_p| self.at(adj_p) != ForestTile::Wall && !not_these.contains(adj_p));
+ adjacent
+ }
+
+ fn at(&self, p: &Point2<usize>) -> ForestTile {
+ self.0[p.y][p.x]
+ }
+
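+ /// Contracts the maze into a graph whose vertices are the start, the end,
+ /// and every tile with more than two open neighbours, with edge weights
+ /// equal to the walking distance between them. The longest-path search
+ /// then only has to branch at junctions.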
+ fn build_forked_map(&self, slippery: bool) -> ForkedForestMap {
+ let start = Point2::new(1, 0);
+ let end = Point2::new(self.0[0].len() - 2, self.0.len() - 1);
+ let mut forks = vec![start, end];
+
+ for y in 1..self.0.len() - 1 {
+ for x in 1..self.0[y].len() - 1 {
+ let p = Point2::new(x, y);
+ if self.at(&p) != ForestTile::Wall {
+ let adjacent_count = self.adjacent(&p, &HashSet::new(), false).len();
+ if adjacent_count > 2 {
+ forks.push(p);
+ }
+ }
+ }
+ }
+
+ let mut connections = HashMap::new();
+
+ for start_point in &forks {
+ let mut active_nodes = vec![DecisionNode {
+ explored: HashSet::new(),
+ current: start_point.clone(),
+ }];
+ active_nodes[0].explored.insert(start_point.clone());
+
+ let mut fork_connections = Vec::new();
+
+ while let Some(node) = active_nodes.pop() {
+ for adjacent in self.adjacent(&node.current, &node.explored, slippery) {
+ let mut new_node = node.clone();
+ new_node.explored.insert(adjacent);
+ new_node.current = adjacent;
+
+ if forks.contains(&new_node.current) {
+ fork_connections.push(ForkConnection {
+ to: new_node.current,
+ distance: new_node.path_length(),
+ });
+ } else {
+ active_nodes.push(new_node);
+ }
+ }
+ }
+
+ connections.insert(start_point.clone(), fork_connections);
+ }
+
+ ForkedForestMap {
+ start,
+ end,
+ connections,
+ }
+ }
+}
+
+impl ForestTile {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(ForestTile::Wall, char('#')),
+ value(ForestTile::Open, char('.')),
+ value(ForestTile::SlopeUp, char('^')),
+ value(ForestTile::SlopeDown, char('v')),
+ value(ForestTile::SlopeLeft, char('<')),
+ value(ForestTile::SlopeRight, char('>')),
+ ))(input)
+ }
+}
+
+impl DecisionNode {
+ fn path_length(&self) -> usize {
+ self.explored.len() - 1
+ }
+}
+
+impl ForkedForestMap {
+ fn longest_end_path_length(&self) -> usize {
+ let mut active_nodes = vec![ForkedDecisionNode {
+ explored_forks: HashSet::new(),
+ current: self.start,
+ current_len: 0,
+ }];
+ active_nodes[0].explored_forks.insert(self.start);
+ let mut longest_end_path_length: Option<usize> = None;
+
+ while let Some(node) = active_nodes.pop() {
+ for adjacent in self.connections.get(&node.current).unwrap() {
+ if !node.explored_forks.contains(&adjacent.to) {
+ let mut new_node = node.clone();
+ new_node.explored_forks.insert(adjacent.to);
+ new_node.current = adjacent.to;
+ new_node.current_len += adjacent.distance;
+
+ if new_node.current == self.end {
+ longest_end_path_length =
+ if let Some(current_longest) = longest_end_path_length {
+ Some(current_longest.max(new_node.current_len))
+ } else {
+ Some(new_node.current_len)
+ };
+ } else {
+ active_nodes.push(new_node);
+ }
+ }
+ }
+ }
+
+ longest_end_path_length.unwrap()
+ }
+}
diff --git a/2023/src/bin/day_24.rs b/2023/src/bin/day_24.rs
new file mode 100644
index 0000000..1f22169
--- /dev/null
+++ b/2023/src/bin/day_24.rs
@@ -0,0 +1,242 @@
+use nalgebra::{Matrix2, Matrix6, Point3, RowVector6, Vector2, Vector3, Vector6};
+use nom::{
+ bytes::complete::tag,
+ character::complete::{i64, line_ending},
+ combinator::map,
+ multi::separated_list1,
+ sequence::{separated_pair, tuple},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_24.txt")?;
+ let parsed = Hailstones::parser(&input).unwrap().1;
+ dbg!(&parsed.count_intersections_2d(200000000000000., 400000000000000.));
+
+ let magic_rock = parsed.find_magic_throwing_rock();
+ dbg!(&magic_rock);
+ dbg!(
+ magic_rock.position.x as i64 + magic_rock.position.y as i64 + magic_rock.position.z as i64
+ );
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Hailstones(Vec<Hailstone>);
+
+#[derive(Debug)]
+struct Hailstone {
+ position: Point3<f64>,
+ velocity: Vector3<f64>,
+}
+
+impl Hailstones {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(line_ending, Hailstone::parser), Hailstones)(input)
+ }
+
+ fn count_intersections_2d(&self, min: f64, max: f64) -> usize {
+ self.0
+ .iter()
+ .enumerate()
+ .map(|(i, hailstone)| {
+ self.0
+ .iter()
+ .skip(i + 1)
+ .filter(|other_hailstone| hailstone.intersects_2d(other_hailstone, min, max))
+ .count()
+ })
+ .sum()
+ }
+
+ fn find_magic_throwing_rock(&self) -> Hailstone {
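+ // Any two independent pairs of hailstones already determine the rock, but
+ // solving in f64 with coordinates this large is numerically shaky, so many
+ // 4-hailstone combinations are tried and the candidate with the smallest
+ // fractional error wins (the true answer is integral).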
+ (0..self.0.len())
+ .flat_map(move |h1| {
+ (h1 + 1..self.0.len()).flat_map(move |h2| {
+ (h2 + 1..self.0.len())
+ .flat_map(move |h3| (h3 + 1..self.0.len()).map(move |h4| [h1, h2, h3, h4]))
+ })
+ })
+ .take(1000000)
+ .map(|hailstones| {
+ let rock = self.find_magic_throwing_rock_for_hailstones(hailstones);
+ (
+ // the solution I'm after uses integers. This tries to find the minimum error.
+ rock.position.x.abs().fract()
+ + rock.position.y.abs().fract()
+ + rock.position.z.abs().fract()
+ + rock.velocity.x.abs().fract()
+ + rock.velocity.y.abs().fract()
+ + rock.velocity.z.abs().fract()
+ + self
+ .0
+ .iter()
+ .map(|h| rock.collision_time(h).fract())
+ .sum::<f64>(),
+ rock,
+ )
+ })
+ .min_by(|(error_a, _), (error_b, _)| error_a.total_cmp(error_b))
+ .unwrap()
+ .1
+ }
+
+ fn find_magic_throwing_rock_for_hailstones(&self, hailstones: [usize; 4]) -> Hailstone {
+ // unknowns are (x, y, z, dx, dy, dz)
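+ // For the rock (P, V) to hit hailstone i there must be a t with
+ // P + t*V = Pi + t*Vi, i.e. (P - Pi) x (V - Vi) = 0. The nonlinear P x V
+ // term is identical for every hailstone, so subtracting the equations of
+ // two hailstones leaves a system that is linear in the unknowns; the two
+ // independent pairs (h1, h2) and (h3, h4) give the six equations below.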
+ let h1 = &self.0[hailstones[0]];
+ let h2 = &self.0[hailstones[1]];
+ let h3 = &self.0[hailstones[2]];
+ let h4 = &self.0[hailstones[3]];
+
+ let coefficients: Matrix6<f64> = Matrix6::from_rows(&[
+ RowVector6::new(
+ h2.velocity.y - h1.velocity.y,
+ h1.velocity.x - h2.velocity.x,
+ 0.,
+ h1.position.y - h2.position.y,
+ h2.position.x - h1.position.x,
+ 0.,
+ ),
+ RowVector6::new(
+ h2.velocity.z - h1.velocity.z,
+ 0.,
+ h1.velocity.x - h2.velocity.x,
+ h1.position.z - h2.position.z,
+ 0.,
+ h2.position.x - h1.position.x,
+ ),
+ RowVector6::new(
+ 0.,
+ h2.velocity.z - h1.velocity.z,
+ h1.velocity.y - h2.velocity.y,
+ 0.,
+ h1.position.z - h2.position.z,
+ h2.position.y - h1.position.y,
+ ),
+ RowVector6::new(
+ h4.velocity.y - h3.velocity.y,
+ h3.velocity.x - h4.velocity.x,
+ 0.,
+ h3.position.y - h4.position.y,
+ h4.position.x - h3.position.x,
+ 0.,
+ ),
+ RowVector6::new(
+ h4.velocity.z - h3.velocity.z,
+ 0.,
+ h3.velocity.x - h4.velocity.x,
+ h3.position.z - h4.position.z,
+ 0.,
+ h4.position.x - h3.position.x,
+ ),
+ RowVector6::new(
+ 0.,
+ h4.velocity.z - h3.velocity.z,
+ h3.velocity.y - h4.velocity.y,
+ 0.,
+ h3.position.z - h4.position.z,
+ h4.position.y - h3.position.y,
+ ),
+ ]);
+ let constants: Vector6<f64> = Vector6::new(
+ h1.position.y * h1.velocity.x
+ - h1.position.x * h1.velocity.y
+ - h2.position.y * h2.velocity.x
+ + h2.position.x * h2.velocity.y,
+ h1.position.z * h1.velocity.x
+ - h1.position.x * h1.velocity.z
+ - h2.position.z * h2.velocity.x
+ + h2.position.x * h2.velocity.z,
+ h1.position.z * h1.velocity.y
+ - h1.position.y * h1.velocity.z
+ - h2.position.z * h2.velocity.y
+ + h2.position.y * h2.velocity.z,
+ h3.position.y * h3.velocity.x
+ - h3.position.x * h3.velocity.y
+ - h4.position.y * h4.velocity.x
+ + h4.position.x * h4.velocity.y,
+ h3.position.z * h3.velocity.x
+ - h3.position.x * h3.velocity.z
+ - h4.position.z * h4.velocity.x
+ + h4.position.x * h4.velocity.z,
+ h3.position.z * h3.velocity.y
+ - h3.position.y * h3.velocity.z
+ - h4.position.z * h4.velocity.y
+ + h4.position.y * h4.velocity.z,
+ );
+
+ if let Some(coefficients_inverse) = coefficients.try_inverse() {
+ let unknowns = coefficients_inverse * constants;
+
+ Hailstone {
+ position: Point3::new(unknowns[0], unknowns[1], unknowns[2]),
+ velocity: Vector3::new(unknowns[3], unknowns[4], unknowns[5]),
+ }
+ } else {
+ panic!("No solution found, matrix didn't invert")
+ }
+ }
+}
+
+impl Hailstone {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_pair(
+ map(
+ tuple((i64, tag(", "), i64, tag(", "), i64)),
+ |(x, _, y, _, z)| Point3::new(x as f64, y as f64, z as f64),
+ ),
+ tag(" @ "),
+ map(
+ tuple((i64, tag(", "), i64, tag(", "), i64)),
+ |(x, _, y, _, z)| Vector3::new(x as f64, y as f64, z as f64),
+ ),
+ ),
+ |(initial_position, velocity)| Hailstone {
+ position: initial_position,
+ velocity,
+ },
+ )(input)
+ }
+
+ fn intersects_2d(&self, other: &Hailstone, min: f64, max: f64) -> bool {
+ let variables = Matrix2::new(
+ self.velocity.x,
+ -other.velocity.x,
+ self.velocity.y,
+ -other.velocity.y,
+ );
+ let constants = Vector2::new(
+ other.position.x - self.position.x,
+ other.position.y - self.position.y,
+ );
+
+ if let Some(variables_inverse) = variables.try_inverse() {
+ let intersection = variables_inverse * constants;
+ let self_t = intersection.x;
+ let other_t = intersection.y;
+
+ let intersection = self.position.xy() + self.velocity.xy() * self_t;
+ self_t >= 0.
+ && other_t >= 0.
+ && intersection.x >= min
+ && intersection.x <= max
+ && intersection.y >= min
+ && intersection.y <= max
+ } else {
+ false
+ }
+ }
+
+ /// This is only intended for hail that definitely collides!
+ fn collision_time(&self, other: &Hailstone) -> f64 {
+ let tx = (self.position.x - other.position.x) / (other.velocity.x - self.velocity.x);
+ let ty = (self.position.y - other.position.y) / (other.velocity.y - self.velocity.y);
+ let tz = (self.position.z - other.position.z) / (other.velocity.z - self.velocity.z);
+ let t = tx.max(ty).max(tz); // sometimes one of these is zero!
+ assert!(t > 0.);
+ t
+ }
+}
diff --git a/2023/src/bin/day_25.rs b/2023/src/bin/day_25.rs
new file mode 100644
index 0000000..7bd3751
--- /dev/null
+++ b/2023/src/bin/day_25.rs
@@ -0,0 +1,154 @@
+use nom::{
+ bytes::complete::tag,
+ character::complete::{alpha1, line_ending, space1},
+ combinator::map,
+ multi::separated_list1,
+ sequence::separated_pair,
+ IResult,
+};
+use std::{collections::BTreeMap, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_25.txt")?;
+ let parsed = Circuit::parser(&input).unwrap().1;
+ let three_cut_partition_sizes = parsed.find_non_zero_three_cut_partition_sizes();
+ dbg!(&three_cut_partition_sizes);
+ dbg!(three_cut_partition_sizes.0 * three_cut_partition_sizes.1);
+
+ Ok(())
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+struct VertexId(usize);
+
+#[derive(Debug)]
+struct Circuit {
+ wires: Vec<Vec<VertexId>>,
+}
+
+#[derive(Debug, Clone, Eq)]
+struct Edge {
+ a: VertexId,
+ b: VertexId,
+}
+
+impl PartialEq for Edge {
+ fn eq(&self, other: &Self) -> bool {
+ (self.a == other.a && self.b == other.b) || (self.a == other.b && self.b == other.a)
+ }
+}
+
+impl Circuit {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(
+ line_ending,
+ separated_pair(
+ map(alpha1, |s: &str| s.to_owned()),
+ tag(": "),
+ separated_list1(space1, map(alpha1, |s: &str| s.to_owned())),
+ ),
+ ),
+ |vertices| {
+ let mut vertex_id_mapping = BTreeMap::new();
+ let mut wires = Vec::new();
+
+ for (from, tos) in vertices {
+ let from_id = vertex_id_mapping
+ .entry(from)
+ .or_insert_with(|| {
+ let new_id = VertexId(wires.len());
+ wires.push(Vec::new());
+ new_id
+ })
+ .clone();
+ for to in tos {
+ let to_id = vertex_id_mapping
+ .entry(to)
+ .or_insert_with(|| {
+ let new_id = VertexId(wires.len());
+ wires.push(Vec::new());
+ new_id
+ })
+ .clone();
+ wires[from_id.0].push(to_id);
+ wires[to_id.0].push(from_id);
+ }
+ }
+ Circuit { wires }
+ },
+ )(input)
+ }
+
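+ /// Brute-forces the 3-edge cut. Every edge of the cut crosses the two
+ /// partitions, so any traversal that reaches the whole (still connected)
+ /// graph must use at least one of them; trying each traversal edge as the
+ /// first cut, then repeating on the reduced graph for the second and
+ /// third, is therefore sufficient.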
+ fn find_non_zero_three_cut_partition_sizes(&self) -> (usize, usize) {
+ let cut1s = self.edges_to_traverse_everything([]);
+ for (cut1i, cut1) in cut1s.iter().enumerate() {
+ let cut2s = self.edges_to_traverse_everything([cut1.clone()]);
+ for (cut2i, cut2) in cut2s.iter().enumerate() {
+ if cut1s[0..cut1i].contains(&cut2) {
+ continue;
+ }
+ for cut3 in self.edges_to_traverse_everything([cut1.clone(), cut2.clone()]) {
+ if cut1s[0..cut1i].contains(&cut3) || cut2s[0..cut2i].contains(&cut3) {
+ // if (cut2, *) didn't work, then (*, cut2) also wouldn't work.
+ continue;
+ }
+ let (size1, size2) = self.partition_sizes([cut1.clone(), cut2.clone(), cut3]);
+ if size2 > 0 {
+ return (size1, size2);
+ }
+ }
+ }
+ }
+ panic!("No partitions with three cuts");
+ }
+
+ fn partition_sizes(&self, cuts: [Edge; 3]) -> (usize, usize) {
+ let mut visited = vec![false; self.wires.len()];
+ let mut frontier = Vec::new();
+
+ visited[0] = true;
+ frontier.push(VertexId(0));
+ let mut visited_count = 1;
+
+ while let Some(from) = frontier.pop() {
+ for to in &self.wires[from.0] {
+ let edge = Edge::new(from, *to);
+ if !cuts.contains(&edge) && !visited[to.0] {
+ visited[to.0] = true;
+ visited_count += 1;
+ frontier.push(*to);
+ }
+ }
+ }
+
+ (visited_count, self.wires.len() - visited_count)
+ }
+
+ fn edges_to_traverse_everything<const N: usize>(&self, cuts: [Edge; N]) -> Vec<Edge> {
+ let mut visited = vec![false; self.wires.len()];
+ let mut frontier = Vec::new();
+
+ visited[0] = true;
+ frontier.push(VertexId(0));
+ let mut used_edges = Vec::new();
+
+ while let Some(from) = frontier.pop() {
+ for to in &self.wires[from.0] {
+ let edge = Edge::new(from, *to);
+ if !cuts.contains(&edge) && !visited[to.0] {
+ visited[to.0] = true;
+ used_edges.push(edge);
+ frontier.push(*to);
+ }
+ }
+ }
+ used_edges
+ }
+}
+
+impl Edge {
+ fn new(a: VertexId, b: VertexId) -> Edge {
+ Edge { a, b }
+ }
+}
diff --git a/2023/src/bin/day_3.rs b/2023/src/bin/day_3.rs
new file mode 100644
index 0000000..06e1300
--- /dev/null
+++ b/2023/src/bin/day_3.rs
@@ -0,0 +1,149 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{digit1, line_ending, none_of},
+ combinator::{map, map_res},
+ multi::{many1, separated_list1},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_3.txt")?;
+ let parsed = PartInventory::parser(&input).unwrap().1;
+ dbg!(&parsed.part_number_sum());
+ dbg!(&parsed.gear_ratio_sum());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct PartInventory {
+ parts: Vec<Part>,
+ symbols: Vec<Symbol>,
+}
+
+#[derive(Debug)]
+struct Part {
+ number: u32,
+ symbols: Vec<char>,
+ y: usize,
+ min_x: usize,
+ max_x: usize,
+}
+
+#[derive(Debug, Clone)]
+struct Symbol {
+ symbol: char,
+ parts: Vec<u32>,
+ x: usize,
+ y: usize,
+}
+
+#[derive(Debug)]
+enum LexToken {
+ Space(usize),
+ Part(usize, u32),
+ Symbol(char),
+}
+
+impl PartInventory {
+ fn parser(input: &str) -> IResult<&str, Self> {
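+ // Lex each row into runs of dots, part numbers, and symbols (tracking
+ // their widths), then resolve which parts and symbols touch each other by
+ // comparing coordinates.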
+ map(LexToken::parser, |tokens| {
+ let mut parts = Vec::new();
+ let mut symbols = Vec::new();
+
+ for (y, row) in tokens.iter().enumerate() {
+ let mut x = 0;
+ for token in row {
+ match token {
+ LexToken::Space(_) => {}
+ LexToken::Part(len, number) => parts.push(Part {
+ number: *number,
+ symbols: Vec::new(),
+ y,
+ min_x: x,
+ max_x: x + len - 1,
+ }),
+ LexToken::Symbol(symbol) => symbols.push(Symbol {
+ symbol: *symbol,
+ parts: Vec::new(),
+ y,
+ x,
+ }),
+ }
+ x += token.len();
+ }
+ }
+
+ for part in &mut parts {
+ part.symbols = symbols
+ .iter()
+ .filter(|symbol| part.touches(symbol))
+ .map(|symbol| symbol.symbol)
+ .collect();
+ }
+
+ for symbol in &mut symbols {
+ symbol.parts = parts
+ .iter()
+ .filter(|part| part.touches(symbol))
+ .map(|part| part.number)
+ .collect();
+ }
+
+ PartInventory { parts, symbols }
+ })(input)
+ }
+
+ fn part_number_sum(&self) -> u32 {
+ self.parts
+ .iter()
+ .filter(|part| !part.symbols.is_empty())
+ .map(|part| part.number)
+ .sum()
+ }
+
+ fn gear_ratio_sum(&self) -> u32 {
+ self.symbols
+ .iter()
+ .filter(|symbol| symbol.symbol == '*' && symbol.parts.len() == 2)
+ .map(|symbol| symbol.parts[0] * symbol.parts[1])
+ .sum()
+ }
+}
+
+impl LexToken {
+ fn parser(input: &str) -> IResult<&str, Vec<Vec<Self>>> {
+ separated_list1(
+ line_ending,
+ many1(alt((
+ map(many1(tag(".")), |dots| LexToken::Space(dots.len())),
+ map_res(digit1, |num_s: &str| {
+ num_s
+ .parse()
+ .map(|num_i| LexToken::Part(num_s.len(), num_i))
+ }),
+ map(none_of("\n"), |s| LexToken::Symbol(s)),
+ ))),
+ )(input)
+ }
+
+ fn len(&self) -> usize {
+ match self {
+ Self::Space(len) => *len,
+ Self::Part(len, _) => *len,
+ Self::Symbol(_) => 1,
+ }
+ }
+}
+
+impl Part {
+ fn touches(&self, symbol: &Symbol) -> bool {
+ let part = self;
+ symbol.x >= part.min_x.saturating_sub(1)
+ && symbol.x <= part.max_x.saturating_add(1)
+ && symbol.y >= part.y.saturating_sub(1)
+ && symbol.y <= part.y.saturating_add(1)
+ }
+}
diff --git a/2023/src/bin/day_4.rs b/2023/src/bin/day_4.rs
new file mode 100644
index 0000000..7f5af6b
--- /dev/null
+++ b/2023/src/bin/day_4.rs
@@ -0,0 +1,92 @@
+use nom::{
+ bytes::complete::tag,
+ character::complete::{line_ending, space1, u32 as nom_u32},
+ combinator::map,
+ multi::separated_list1,
+ sequence::tuple,
+ IResult,
+};
+use std::{collections::BTreeSet, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_4.txt")?;
+ let parsed = Scratchcards::parser(&input).unwrap().1;
+ dbg!(&parsed.points());
+ dbg!(&parsed.scratchcard_explosion_sum());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Scratchcards(Vec<Scratchcard>);
+
+#[derive(Debug)]
+struct Scratchcard {
+ winning: BTreeSet<u32>,
+ owned: BTreeSet<u32>,
+}
+
+impl Scratchcards {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, Scratchcard::parser),
+ Scratchcards,
+ )(input)
+ }
+
+ fn points(&self) -> usize {
+ self.0.iter().map(|card| card.points()).sum()
+ }
+
+ fn scratchcard_explosion_sum(&self) -> u64 {
+ let mut scratchcard_pile = vec![1_u64; self.0.len()];
+
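+ // Every copy of card i wins one extra copy of each of the next
+ // `winning_numbers` cards.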
+ for i in 0..scratchcard_pile.len() {
+ let points = self.0[i].winning_numbers();
+ let new_scratchcards: u64 = scratchcard_pile[i];
+ for offset in 0..points {
+ if let Some(pile) = scratchcard_pile.get_mut(i + 1 + offset) {
+ *pile += new_scratchcards;
+ }
+ }
+ }
+
+ scratchcard_pile.into_iter().sum()
+ }
+}
+
+impl Scratchcard {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ tag("Card"),
+ space1,
+ nom_u32,
+ tag(":"),
+ space1,
+ separated_list1(space1, nom_u32),
+ space1,
+ tag("|"),
+ space1,
+ separated_list1(space1, nom_u32),
+ )),
+ |(_, _, _, _, _, winning, _, _, _, owned)| Scratchcard {
+ winning: winning.into_iter().collect(),
+ owned: owned.into_iter().collect(),
+ },
+ )(input)
+ }
+
+ fn winning_numbers(&self) -> usize {
+ self.winning.intersection(&self.owned).count()
+ }
+
+ fn points(&self) -> usize {
+ let winning_numbers = self.winning_numbers();
+ if winning_numbers > 0 {
+ 2_usize.pow(winning_numbers as u32 - 1)
+ } else {
+ 0
+ }
+ }
+}
diff --git a/2023/src/bin/day_5.rs b/2023/src/bin/day_5.rs
new file mode 100644
index 0000000..2243bf8
--- /dev/null
+++ b/2023/src/bin/day_5.rs
@@ -0,0 +1,257 @@
+use nom::{
+ bytes::complete::tag,
+ character::complete::{line_ending, not_line_ending, space1, u64 as nom_u64},
+ combinator::map,
+ multi::separated_list1,
+ sequence::{pair, tuple},
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_5.txt")?;
+ let parsed = Almanac::parser(&input).unwrap().1;
+ dbg!(&parsed.min_location());
+ dbg!(&parsed.range_min_location());
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Almanac {
+ seeds: Vec<u64>,
+ seed_ranges: Vec<Range>,
+ mappings: Vec<AlmanacMapping>,
+}
+
+#[derive(Debug)]
+struct AlmanacMapping {
+ _heading: String,
+ maps: Vec<Mapping>,
+}
+
+#[derive(Debug)]
+struct Mapping {
+ source_start: u64,
+ dest_start: u64,
+ len: u64,
+}
+
+#[derive(Debug, Clone, Copy)]
+struct Range {
+ start: u64,
+ len: u64,
+}
+
+impl Almanac {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ tag("seeds: "),
+ separated_list1(space1, nom_u64),
+ pair(line_ending, line_ending),
+ separated_list1(pair(line_ending, line_ending), AlmanacMapping::parser),
+ )),
+ |(_, seeds, _, mappings)| {
+ let seed_ranges = seeds
+ .chunks(2)
+ .map(|chunk| Range {
+ start: chunk[0],
+ len: chunk[1],
+ })
+ .collect();
+ Almanac {
+ seeds,
+ seed_ranges,
+ mappings,
+ }
+ },
+ )(input)
+ }
+
+ fn locations(&self) -> Vec<u64> {
+ self.mappings
+ .iter()
+ .fold(self.seeds.clone(), |acc, mapping| {
+ acc.into_iter().map(|seed| mapping.convert(seed)).collect()
+ })
+ }
+
+ fn min_location(&self) -> u64 {
+ self.locations().into_iter().min().unwrap()
+ }
+
+ fn range_locations(&self) -> Vec<Range> {
+ self.mappings
+ .iter()
+ .fold(self.seed_ranges.clone(), |acc, mapping| {
+ dbg!(&acc);
+ acc.into_iter()
+ .flat_map(|seed_range| mapping.convert_range(seed_range))
+ .collect()
+ })
+ }
+
+ fn range_min_location(&self) -> u64 {
+ self.range_locations()
+ .into_iter()
+ .map(|r| r.start)
+ .min()
+ .unwrap()
+ }
+}
+
+impl AlmanacMapping {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ not_line_ending,
+ line_ending,
+ separated_list1(line_ending, Mapping::parser),
+ )),
+ |(heading, _, maps)| AlmanacMapping {
+ _heading: heading.to_owned(),
+ maps,
+ },
+ )(input)
+ }
+
+ fn convert(&self, source: u64) -> u64 {
+ self.maps
+ .iter()
+ .filter_map(|mapping| mapping.convert(source))
+ .next()
+ .unwrap_or(source)
+ }
+
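+ // Maps every part of `source` that a mapping covers to its destination
+ // range and passes the uncovered remainder through unchanged.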
+ fn convert_range(&self, source: Range) -> Vec<Range> {
+ let converted_ranges: Vec<(Range, Range)> = self
+ .maps
+ .iter()
+ .filter_map(|mapping| mapping.convert_range(source))
+ .collect();
+
+ let mut result = Vec::new();
+ let mut uncovered_ranges = vec![source];
+ for (covered_source, dest) in converted_ranges {
+ result.push(dest);
+
+ uncovered_ranges = uncovered_ranges
+ .into_iter()
+ .flat_map(|r| r.difference(covered_source))
+ .collect();
+ }
+
+ result.append(&mut uncovered_ranges);
+
+ result
+ }
+}
+
+impl Mapping {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((nom_u64, space1, nom_u64, space1, nom_u64)),
+ |(dest_start, _, source_start, _, len)| Mapping {
+ source_start,
+ dest_start,
+ len,
+ },
+ )(input)
+ }
+
+ fn source_range(&self) -> Range {
+ Range {
+ start: self.source_start,
+ len: self.len,
+ }
+ }
+
+ fn convert(&self, source: u64) -> Option<u64> {
+ if source >= self.source_start && source < self.source_start + self.len {
+ Some(source - self.source_start + self.dest_start)
+ } else {
+ None
+ }
+ }
+
+ fn convert_range(&self, source: Range) -> Option<(Range, Range)> {
+ if let Some(source_intersection) = source.intersection(self.source_range()) {
+ Some((
+ source_intersection,
+ Range {
+ start: source_intersection.start - self.source_start + self.dest_start,
+ len: source_intersection.len,
+ },
+ ))
+ } else {
+ None
+ }
+ }
+}
+
+impl Range {
+ fn end(&self) -> u64 {
+ self.start + self.len
+ }
+
+ fn intersection(self, other: Range) -> Option<Range> {
+ if self.start < other.start {
+ if self.end() <= other.start {
+ None
+ } else if self.end() < other.end() {
+ Some(Range {
+ start: other.start,
+ len: self.end() - other.start,
+ })
+ } else {
+ Some(other)
+ }
+ } else if self.start < other.end() {
+ if self.end() < other.end() {
+ Some(self)
+ } else {
+ Some(Range {
+ start: self.start,
+ len: other.end() - self.start,
+ })
+ }
+ } else {
+ None
+ }
+ }
+
+ fn difference(self, other: Range) -> Vec<Range> {
+ if self.start < other.start {
+ if self.end() <= other.start {
+ vec![self]
+ } else if self.end() <= other.end() {
+ vec![Range {
+ start: self.start,
+ len: other.start - self.start,
+ }]
+ } else {
+ vec![
+ Range {
+ start: self.start,
+ len: other.start - self.start,
+ },
+ Range {
+ start: other.end(),
+ len: self.end() - other.end(),
+ },
+ ]
+ }
+ } else if self.start < other.end() {
+ if self.end() <= other.end() {
+ vec![]
+ } else {
+ vec![Range {
+ start: other.end(),
+ len: self.end() - other.end(),
+ }]
+ }
+ } else {
+ vec![self]
+ }
+ }
+}
diff --git a/2023/src/bin/day_6.rs b/2023/src/bin/day_6.rs
new file mode 100644
index 0000000..138843e
--- /dev/null
+++ b/2023/src/bin/day_6.rs
@@ -0,0 +1,70 @@
+use nom::{
+ bytes::complete::tag,
+ character::complete::{line_ending, space1, u64 as nom_u64},
+ combinator::map,
+ multi::separated_list1,
+ sequence::tuple,
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_6.txt")?;
+ let parsed = RacePlan::multi_parser(&input).unwrap().1;
+ for plan in parsed {
+ dbg!(&plan.win_count_product());
+ }
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct RacePlan(Vec<RaceRecord>);
+
+#[derive(Debug)]
+struct RaceRecord {
+ time: u64,
+ distance: u64,
+}
+
+impl RacePlan {
+ fn multi_parser(input: &str) -> IResult<&str, Vec<Self>> {
+ separated_list1(line_ending, RacePlan::parser)(input)
+ }
+
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ tuple((
+ tag("Time:"),
+ space1,
+ separated_list1(space1, nom_u64),
+ line_ending,
+ tag("Distance:"),
+ space1,
+ separated_list1(space1, nom_u64),
+ )),
+ |(_, _, times, _, _, _, distances)| {
+ RacePlan(
+ times
+ .into_iter()
+ .zip(distances.into_iter())
+ .map(|(time, distance)| RaceRecord { time, distance })
+ .collect(),
+ )
+ },
+ )(input)
+ }
+
+ fn win_count_product(&self) -> usize {
+ self.0.iter().map(|r| r.count_wins()).product()
+ }
+}
+
+impl RaceRecord {
+ fn count_wins(&self) -> usize {
+ (1..self.time)
+ .map(|charge_time| charge_time * (self.time - charge_time))
+ .filter(|distance| *distance > self.distance)
+ .count()
+ }
+}
diff --git a/2023/src/bin/day_7.rs b/2023/src/bin/day_7.rs
new file mode 100644
index 0000000..3b629bd
--- /dev/null
+++ b/2023/src/bin/day_7.rs
@@ -0,0 +1,169 @@
+use nom::{
+ branch::alt,
+ character::complete::{char as nom_char, line_ending, space1, u32 as nom_u32},
+ combinator::{map, value},
+ multi::separated_list1,
+ sequence::tuple,
+ IResult,
+};
+use std::{collections::BTreeMap, fs};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_7.txt")?;
+
+ {
+ let mut games_without_jokers = CardGame::parser(false)(&input).unwrap().1;
+ games_without_jokers.sort();
+ dbg!(games_without_jokers.calculate_winnings());
+ }
+
+ {
+ let mut games_with_jokers = CardGame::parser(true)(&input).unwrap().1;
+ games_with_jokers.sort();
+ dbg!(games_with_jokers.calculate_winnings());
+ }
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct CardGame(Vec<CardHand>);
+
+#[derive(Debug, PartialEq, Eq)]
+struct CardHand {
+ cards: [Card; 5],
+ bid: u32,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
+struct Card(u8);
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum CardRank {
+ HighCard,
+ OnePair,
+ TwoPair,
+ ThreeOfAKind,
+ FullHouse,
+ FourOfAKind,
+ FiveOfAKind,
+}
+
+impl CardGame {
+ fn parser(with_jokers: bool) -> impl FnMut(&str) -> IResult<&str, Self> {
+ move |input: &str| {
+ map(
+ separated_list1(line_ending, CardHand::parser(with_jokers)),
+ CardGame,
+ )(input)
+ }
+ }
+
+ fn sort(&mut self) {
+ self.0.sort()
+ }
+
+ fn calculate_winnings(&self) -> u32 {
+ self.0
+ .iter()
+ .enumerate()
+ .map(|(i, hand)| hand.bid * (i as u32 + 1))
+ .sum()
+ }
+}
+
+impl CardHand {
+ fn parser(with_jokers: bool) -> impl FnMut(&str) -> IResult<&str, Self> {
+ move |input: &str| {
+ map(
+ tuple((
+ Card::parser(with_jokers),
+ Card::parser(with_jokers),
+ Card::parser(with_jokers),
+ Card::parser(with_jokers),
+ Card::parser(with_jokers),
+ space1,
+ nom_u32,
+ )),
+ |(c1, c2, c3, c4, c5, _, bid)| CardHand {
+ cards: [c1, c2, c3, c4, c5],
+ bid,
+ },
+ )(input)
+ }
+ }
+
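+ // Jokers are parsed as Card(1); they are counted separately here and added
+ // to the most frequent remaining card before ranking the hand.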
+ fn card_rank(&self) -> CardRank {
+ let mut cards_set: BTreeMap<Card, u8> = BTreeMap::new();
+ for card in &self.cards {
+ *cards_set.entry(*card).or_insert(0) += 1;
+ }
+
+ let jokers = cards_set.get(&Card(1)).cloned().unwrap_or(0);
+ cards_set.remove(&Card(1));
+
+ let mut card_counts: Vec<u8> = cards_set.into_values().collect();
+ card_counts.sort_by(|a, b| b.cmp(a));
+
+ if card_counts.len() == 0 {
+ // all 5 were jokers!
+ CardRank::FiveOfAKind
+ } else {
+ card_counts[0] += jokers;
+ if card_counts[0] == 5 {
+ CardRank::FiveOfAKind
+ } else if card_counts[0] == 4 {
+ CardRank::FourOfAKind
+ } else if card_counts[0] == 3 && card_counts[1] == 2 {
+ CardRank::FullHouse
+ } else if card_counts[0] == 3 {
+ CardRank::ThreeOfAKind
+ } else if card_counts[0] == 2 && card_counts[1] == 2 {
+ CardRank::TwoPair
+ } else if card_counts[0] == 2 {
+ CardRank::OnePair
+ } else {
+ CardRank::HighCard
+ }
+ }
+ }
+}
+
+impl Card {
+ fn parser(with_jokers: bool) -> impl FnMut(&str) -> IResult<&str, Self> {
+ move |input: &str| {
+ map(
+ alt((
+ value(2, nom_char('2')),
+ value(3, nom_char('3')),
+ value(4, nom_char('4')),
+ value(5, nom_char('5')),
+ value(6, nom_char('6')),
+ value(7, nom_char('7')),
+ value(8, nom_char('8')),
+ value(9, nom_char('9')),
+ value(10, nom_char('T')),
+ value(if with_jokers { 1 } else { 11 }, nom_char('J')),
+ value(12, nom_char('Q')),
+ value(13, nom_char('K')),
+ value(14, nom_char('A')),
+ )),
+ Card,
+ )(input)
+ }
+ }
+}
+
+impl std::cmp::Ord for CardHand {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.card_rank()
+ .cmp(&other.card_rank())
+ .then(self.cards.cmp(&other.cards))
+ }
+}
+
+impl std::cmp::PartialOrd for CardHand {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
diff --git a/2023/src/bin/day_8.rs b/2023/src/bin/day_8.rs
new file mode 100644
index 0000000..2da3cf5
--- /dev/null
+++ b/2023/src/bin/day_8.rs
@@ -0,0 +1,213 @@
+use nom::{
+ branch::alt,
+ bytes::complete::tag,
+ character::complete::{alpha1, char as nom_char, line_ending},
+ combinator::{map, value},
+ multi::{many1, separated_list1},
+ sequence::{pair, preceded, separated_pair, terminated},
+ IResult,
+};
+use std::{collections::BTreeMap, fs, ops::Range};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_8.txt")?;
+ let mut directions = Directions::parser(&input).unwrap().1;
+ dbg!(directions.steps_from_a_to_z());
+ dbg!(directions.ghost_steps_from_a_to_z());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct Directions {
+ turns: Vec<Turn>,
+ packed_map: Vec<PackedFork>,
+ packed_ghost_starts: Range<u16>,
+ packed_ghost_destinations: Range<u16>,
+ distance_to_ghost_dest_cache: BTreeMap<(u16, usize), (u16, usize)>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum Turn {
+ Left,
+ Right,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
+struct Location(String);
+
+#[derive(Debug)]
+struct Fork {
+ left: Location,
+ right: Location,
+}
+
+#[derive(Debug)]
+struct PackedFork {
+ left: u16,
+ right: u16,
+}
+
+impl Directions {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_pair(
+ many1(Turn::parser),
+ pair(line_ending, line_ending),
+ separated_list1(
+ line_ending,
+ separated_pair(Location::parser, tag(" = "), Fork::parser),
+ ),
+ ),
+ |(turns, map)| {
+ let map: BTreeMap<Location, Fork> = map.into_iter().collect();
+ let mut locations: Vec<Location> = map.keys().cloned().collect();
+ locations.sort_by_key(|l| l.0.chars().rev().collect::<String>());
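+ // Sorting by the reversed name puts locations ending in 'A' at the front
+ // (AAA, if present, at index 0) and locations ending in 'Z' at the back
+ // (ZZZ last), so starts and destinations become contiguous index ranges.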
+
+ Directions {
+ turns,
+ packed_map: locations
+ .iter()
+ .map(|l| {
+ let unpacked_fork = map.get(l).unwrap();
+ PackedFork {
+ left: locations
+ .iter()
+ .position(|f| *f == unpacked_fork.left)
+ .unwrap() as u16,
+ right: locations
+ .iter()
+ .position(|f| *f == unpacked_fork.right)
+ .unwrap() as u16,
+ }
+ })
+ .collect(),
+ packed_ghost_starts: 0
+ ..locations.iter().position(|l| !l.ghost_start()).unwrap() as u16,
+ packed_ghost_destinations: locations.iter().position(|l| l.ghost_end()).unwrap()
+ as u16
+ ..locations.len() as u16,
+ distance_to_ghost_dest_cache: BTreeMap::new(),
+ }
+ },
+ )(input)
+ }
+
+ fn steps_from_a_to_z(&self) -> usize {
+ let mut step_count = 0;
+ let mut current_location: u16 = 0;
+
+ for dir in self.turns.iter().cycle() {
+ let current_fork: &PackedFork = &self.packed_map[current_location as usize];
+ current_location = match dir {
+ Turn::Left => current_fork.left,
+ Turn::Right => current_fork.right,
+ };
+ step_count += 1;
+
+ if current_location == self.packed_map.len() as u16 - 1 {
+ return step_count;
+ }
+ }
+ unreachable!()
+ }
+
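+ /// Instead of assuming one fixed cycle per ghost, repeatedly advances
+ /// whichever ghosts are behind to their next end location (memoised per
+ /// start location and turn offset) until all ghosts land on an end at the
+ /// same step count.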
+ fn ghost_steps_from_a_to_z(&mut self) -> usize {
+ let mut current_locations: Vec<(u16, usize)> = self
+ .packed_ghost_starts
+ .clone()
+ .map(|start| self.ghost_step_to_next_end(start, 0))
+ .collect();
+
+ let mut any_stepped = true;
+ while any_stepped {
+ any_stepped = false;
+
+ let current_max = current_locations
+ .iter()
+ .map(|(_, steps)| steps.clone())
+ .max()
+ .unwrap();
+
+ for current_location in &mut current_locations {
+ if current_location.1 < current_max {
+ any_stepped = true;
+ let (new_location, extra_steps) =
+ self.ghost_step_to_next_end(current_location.0, current_location.1);
+ current_location.0 = new_location;
+ current_location.1 += extra_steps;
+ }
+ }
+ }
+
+ current_locations[0].1
+ }
+
+ fn ghost_step_to_next_end(&mut self, start: u16, current_step_count: usize) -> (u16, usize) {
+ self.distance_to_ghost_dest_cache
+ .entry((start, current_step_count % self.turns.len()))
+ .or_insert_with(|| {
+ let mut step_count = 0;
+ let mut current_location: u16 = start;
+
+ for dir in self
+ .turns
+ .iter()
+ .skip(current_step_count % self.turns.len())
+ .cycle()
+ {
+ let current_fork: &PackedFork = &self.packed_map[current_location as usize];
+ current_location = match dir {
+ Turn::Left => current_fork.left,
+ Turn::Right => current_fork.right,
+ };
+ step_count += 1;
+
+ if self.packed_ghost_destinations.contains(&current_location) {
+ break;
+ }
+ }
+
+ (current_location, step_count)
+ })
+ .clone()
+ }
+}
+
+impl Turn {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ alt((
+ value(Turn::Left, nom_char('L')),
+ value(Turn::Right, nom_char('R')),
+ ))(input)
+ }
+}
+
+impl Location {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(alpha1, |l: &str| Location(l.to_string()))(input)
+ }
+
+ fn ghost_start(&self) -> bool {
+ self.0.ends_with("A")
+ }
+
+ fn ghost_end(&self) -> bool {
+ self.0.ends_with("Z")
+ }
+}
+
+impl Fork {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ preceded(
+ tag("("),
+ terminated(
+ separated_pair(Location::parser, tag(", "), Location::parser),
+ tag(")"),
+ ),
+ ),
+ |(left, right)| Fork { left, right },
+ )(input)
+ }
+}
diff --git a/2023/src/bin/day_9.rs b/2023/src/bin/day_9.rs
new file mode 100644
index 0000000..f0acc5e
--- /dev/null
+++ b/2023/src/bin/day_9.rs
@@ -0,0 +1,81 @@
+use nom::{
+ character::complete::{i32 as nom_i32, line_ending, space1},
+ combinator::map,
+ multi::separated_list1,
+ IResult,
+};
+use std::fs;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let input = fs::read_to_string("inputs/day_9.txt")?;
+ let parsed = DataSequences::parser(&input).unwrap().1;
+ dbg!(&parsed.sum_extrapolated_values());
+ dbg!(&parsed.sum_extrapolated_front_values());
+
+ Ok(())
+}
+
+#[derive(Debug)]
+struct DataSequences(Vec<DataSequence>);
+
+#[derive(Debug)]
+struct DataSequence(Vec<i32>);
+
+impl DataSequences {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(
+ separated_list1(line_ending, DataSequence::parser),
+ DataSequences,
+ )(input)
+ }
+
+ fn sum_extrapolated_values(&self) -> i32 {
+ self.0.iter().map(|d| d.extrapolate()).sum()
+ }
+
+ fn sum_extrapolated_front_values(&self) -> i32 {
+ self.0.iter().map(|d| d.extrapolate_front()).sum()
+ }
+}
+
+impl DataSequence {
+ fn parser(input: &str) -> IResult<&str, Self> {
+ map(separated_list1(space1, nom_i32), DataSequence)(input)
+ }
+
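+ /// Builds the difference triangle down to a row of zeroes, then folds it
+ /// back up, appending the next extrapolated value at each level.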
+ fn extrapolate(&self) -> i32 {
+ let mut differential_tree = vec![self.0.clone()];
+
+ while !differential_tree
+ .last()
+ .map_or(true, |l| l.iter().all(|i| *i == 0))
+ {
+ let last_row = differential_tree.last().unwrap();
+ let mut new_row = Vec::with_capacity(last_row.len() - 1);
+ for i in 0..last_row.len() - 1 {
+ new_row.push(last_row[i + 1] - last_row[i]);
+ }
+ differential_tree.push(new_row);
+ }
+
+ differential_tree.last_mut().unwrap().push(0);
+
+ while differential_tree.len() > 1 {
+ let bottom_row = differential_tree.pop().unwrap();
+
+ let new_last = differential_tree.last_mut().unwrap();
+ new_last.push(new_last.last().unwrap() + bottom_row.last().unwrap());
+ }
+
+ differential_tree
+ .last()
+ .and_then(|l| l.last())
+ .copied()
+ .unwrap()
+ }
+
+ fn extrapolate_front(&self) -> i32 {
+ let backwards = DataSequence(self.0.iter().rev().copied().collect());
+ backwards.extrapolate()
+ }
+}