From 307a9d41d955d8ee00fb03d7eef039a7251d2a8c Mon Sep 17 00:00:00 2001
From: Justin Wernick
Date: Fri, 11 Oct 2024 13:45:26 +0200
Subject: Optimize day 19 part 1. Still no luck on part 2.

---
 2015/day19.exs | 56 +++++++++++++++++++++++++++++++++++++++++++++-----------
 1 file changed, 45 insertions(+), 11 deletions(-)

diff --git a/2015/day19.exs b/2015/day19.exs
index 2aae946..d73c9c0 100644
--- a/2015/day19.exs
+++ b/2015/day19.exs
@@ -1,11 +1,42 @@
+defmodule Tokenizer do
+  use Agent
+
+  def start do
+    Agent.start_link(fn -> {0, %{}} end, name: __MODULE__)
+  end
+
+  def tokenize(raw) do
+    strTokens = List.flatten(Regex.scan(~r/[eA-Z][a-z]*/, raw))
+
+    Enum.map(strTokens, fn strToken ->
+      {lastToken, token} =
+        Agent.get(__MODULE__, fn {lastToken, map} -> {lastToken, Map.get(map, strToken)} end)
+
+      if token do
+        token
+      else
+        token = lastToken + 1
+        Agent.update(__MODULE__, fn {_, map} -> {token, Map.put(map, strToken, token)} end)
+        token
+      end
+    end)
+  end
+end
+
+{:ok, _} = Tokenizer.start()
+
 {replacements, medicine} =
   File.stream!("inputs/day19.txt")
   |> Stream.map(&String.trim/1)
   |> Stream.filter(&(&1 != ""))
   |> Stream.map(fn line ->
     case Regex.run(~r/(\w+) => (\w+)/, line) do
-      [_, from, to] -> {:replacement, from, to}
-      _ -> {:start, line}
+      [_, from, to] ->
+        [from] = Tokenizer.tokenize(from)
+        {:replacement, from, Tokenizer.tokenize(to)}
+
+      _ ->
+        {:start, Tokenizer.tokenize(line)}
     end
   end)
   |> Enum.reduce({[], nil}, fn
@@ -14,24 +45,27 @@
   end)
 
 nextGen = fn start ->
-  Enum.flat_map(0..(String.length(start) - 1), fn offset ->
-    {beforeReplace, replaceStart} = String.split_at(start, offset)
+  Enum.flat_map(0..(length(start) - 1), fn offset ->
+    {beforeReplace, [replaceStart | afterReplace]} = Enum.split(start, offset)
 
-    Enum.filter(replacements, fn {from, _} -> String.starts_with?(replaceStart, from) end)
-    |> Enum.map(fn {from, to} ->
-      {^from, afterReplace} = String.split_at(replaceStart, String.length(from))
-      beforeReplace <> to <> afterReplace
+    Enum.filter(replacements, fn {from, _} -> replaceStart == from end)
+    |> Enum.map(fn {_, to} ->
+      beforeReplace ++ to ++ afterReplace
     end)
   end)
-  |> Enum.uniq()
 end
 
-calibration = nextGen.(medicine)
+calibration = Enum.uniq(nextGen.(medicine))
 
 IO.puts("Calibration size: #{length(calibration)}")
 
 medicineGeneration =
-  Stream.iterate(["e"], fn gen -> Enum.flat_map(gen, &nextGen.(&1)) |> Enum.uniq() end)
+  Stream.iterate([Tokenizer.tokenize("e")], fn gen ->
+    Enum.flat_map(gen, &nextGen.(&1))
+    |> Enum.filter(&(length(&1) <= length(medicine)))
+    |> Enum.uniq()
+  end)
+  |> Stream.each(&IO.inspect("#{length(&1)}"))
   |> Enum.find_index(fn gen -> Enum.any?(gen, &(&1 == medicine)) end)
 
 IO.puts("The Medicine generation: #{medicineGeneration}")
--
cgit v1.2.3
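
A note on the optimization, outside the patch proper: the new Tokenizer
interns each element name ("C", "Rn", "Ar", ...) as a small integer via an
Agent-held map, so molecules become integer lists and nextGen can compare
single tokens with == instead of scanning substrings. A minimal sketch of
the same interning idea, with no Agent and with the hypothetical module
name InternSketch (not part of the committed code):

defmodule InternSketch do
  # Hypothetical helper, not from the patch: interns each element token
  # of a molecule string as a small integer, returning the token list
  # and the updated string-to-integer map.
  def tokenize(raw, map \\ %{}) do
    Regex.scan(~r/[eA-Z][a-z]*/, raw)
    |> List.flatten()
    |> Enum.map_reduce(map, fn str, acc ->
      case acc do
        # Token already interned: reuse its integer id.
        %{^str => id} ->
          {id, acc}

        # New token: assign the next id and record it.
        _ ->
          id = map_size(acc) + 1
          {id, Map.put(acc, str, id)}
      end
    end)
  end
end

{tokens, map} = InternSketch.tokenize("CRnFYFYFAr")
IO.inspect(tokens) # [1, 2, 3, 4, 3, 4, 3, 5]
IO.inspect(map)    # %{"C" => 1, "Rn" => 2, "F" => 3, "Y" => 4, "Ar" => 5}

Threading the map through Enum.map_reduce/3 keeps the sketch pure; the
committed version instead shares a named Agent so repeated tokenize calls
agree on the same ids across the whole input file.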
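
Likewise, a sketch of one replacement generation over an interned molecule,
mirroring the shape of the patched nextGen; the variable names, token ids,
and rule set below are invented for the demo:

# One generation of single-token replacements over an interned molecule.
next_gen = fn molecule, replacements ->
  Enum.flat_map(0..(length(molecule) - 1), fn offset ->
    # Isolate the token at `offset`; interning makes the rule check an
    # integer equality rather than a substring match.
    {prefix, [token | suffix]} = Enum.split(molecule, offset)

    replacements
    |> Enum.filter(fn {from, _to} -> from == token end)
    |> Enum.map(fn {_from, to} -> prefix ++ to ++ suffix end)
  end)
end

# Invented rules: with H interned as 1 and O as 2, H => HO and O => HH.
rules = [{1, [1, 2]}, {2, [1, 1]}]
IO.inspect(next_gen.([1, 2], rules)) # [[1, 2, 2], [1, 1, 1]]

Splitting out exactly one token suffices because every rule's left-hand
side is a single element, which the patch itself asserts with the
[from] = Tokenizer.tokenize(from) match; that is what removes the old
String.starts_with?/String.split_at scanning.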