author    Justin Wernick <justin@worthe-it.co.za>  2024-10-11 13:45:26 +0200
committer Justin Wernick <justin@worthe-it.co.za>  2024-10-11 13:45:26 +0200
commit    307a9d41d955d8ee00fb03d7eef039a7251d2a8c (patch)
tree      ec4f7cd882f8eeae2f51edcc5d3ecf0fa45ff3b3
parent    627051329630ca122f4f35274d360040c229d279 (diff)
Optimize day 19 part 1. Still no luck on part 2.
-rw-r--r--  2015/day19.exs  56
1 file changed, 45 insertions, 11 deletions
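
Background for the diff below: part 1 previously worked on the raw molecule string, slicing it with String.split_at/2 and comparing substrings for every candidate replacement. The commit instead interns each element name ("Ca", "Rn", "e", ...) as a small integer through the new Agent-backed Tokenizer, so a molecule becomes a flat list of integers and matching a replacement is a single integer comparison. The snippet below is only a minimal sketch of that interning idea; TokenSketch and tokenize/2 are illustrative names, not code from the repository, and the committed version keeps the lookup table in an Agent instead of threading it through as an argument.

# Illustrative only: a stateless version of the interning idea this commit
# adds (the committed Tokenizer keeps its string->integer map in an Agent).
# TokenSketch and tokenize/2 are made-up names, not part of the repository.
defmodule TokenSketch do
  # Split a molecule such as "CaRnCaAr" into element names and replace each
  # one with a small integer, reusing the same integer for repeated elements.
  def tokenize(raw, map \\ %{}) do
    ~r/[eA-Z][a-z]*/
    |> Regex.scan(raw)
    |> List.flatten()
    |> Enum.map_reduce(map, fn element, acc ->
      case Map.fetch(acc, element) do
        {:ok, token} ->
          {token, acc}

        :error ->
          token = map_size(acc) + 1
          {token, Map.put(acc, element, token)}
      end
    end)
  end
end

{tokens, map} = TokenSketch.tokenize("CaRnCaAr")
IO.inspect(tokens) # => [1, 2, 1, 3]
IO.inspect(map)    # => %{"Ar" => 3, "Ca" => 1, "Rn" => 2}

In the committed version the map lives in an Agent so that tokens assigned while parsing the replacement rules are reused when the medicine molecule and the starting "e" are tokenized later.
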
diff --git a/2015/day19.exs b/2015/day19.exs
index 2aae946..d73c9c0 100644
--- a/2015/day19.exs
+++ b/2015/day19.exs
@@ -1,11 +1,42 @@
+defmodule Tokenizer do
+ use Agent
+
+ def start do
+ Agent.start_link(fn -> {0, %{}} end, name: __MODULE__)
+ end
+
+ def tokenize(raw) do
+ strTokens = List.flatten(Regex.scan(~r/[eA-Z][a-z]*/, raw))
+
+ Enum.map(strTokens, fn strToken ->
+ {lastToken, token} =
+ Agent.get(__MODULE__, fn {lastToken, map} -> {lastToken, Map.get(map, strToken)} end)
+
+ if token do
+ token
+ else
+ token = lastToken + 1
+ Agent.update(__MODULE__, fn {_, map} -> {token, Map.put(map, strToken, token)} end)
+ token
+ end
+ end)
+ end
+end
+
+{:ok, _} = Tokenizer.start()
+
{replacements, medicine} =
File.stream!("inputs/day19.txt")
|> Stream.map(&String.trim/1)
|> Stream.filter(&(&1 != ""))
|> Stream.map(fn line ->
case Regex.run(~r/(\w+) => (\w+)/, line) do
- [_, from, to] -> {:replacement, from, to}
- _ -> {:start, line}
+ [_, from, to] ->
+ [from] = Tokenizer.tokenize(from)
+ {:replacement, from, Tokenizer.tokenize(to)}
+
+ _ ->
+ {:start, Tokenizer.tokenize(line)}
end
end)
|> Enum.reduce({[], nil}, fn
@@ -14,24 +45,27 @@
end)
nextGen = fn start ->
- Enum.flat_map(0..(String.length(start) - 1), fn offset ->
- {beforeReplace, replaceStart} = String.split_at(start, offset)
+ Enum.flat_map(0..(length(start) - 1), fn offset ->
+ {beforeReplace, [replaceStart | afterReplace]} = Enum.split(start, offset)
- Enum.filter(replacements, fn {from, _} -> String.starts_with?(replaceStart, from) end)
- |> Enum.map(fn {from, to} ->
- {^from, afterReplace} = String.split_at(replaceStart, String.length(from))
- <<beforeReplace <> to <> afterReplace>>
+ Enum.filter(replacements, fn {from, _} -> replaceStart == from end)
+ |> Enum.map(fn {_, to} ->
+ beforeReplace ++ to ++ afterReplace
end)
end)
- |> Enum.uniq()
end
-calibration = nextGen.(medicine)
+calibration = Enum.uniq(nextGen.(medicine))
IO.puts("Calibration size: #{length(calibration)}")
medicineGeneration =
- Stream.iterate(["e"], fn gen -> Enum.flat_map(gen, &nextGen.(&1)) |> Enum.uniq() end)
+ Stream.iterate([Tokenizer.tokenize("e")], fn gen ->
+ Enum.flat_map(gen, &nextGen.(&1))
+ |> Enum.filter(&(length(&1) <= length(medicine)))
+ |> Enum.uniq()
+ end)
+ |> Stream.each(&IO.inspect("#{length(&1)}"))
|> Enum.find_index(fn gen -> Enum.any?(gen, &(&1 == medicine)) end)
IO.puts("The Medicine generation: #{medicineGeneration}")