1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
|
defmodule Tokenizer do
  @moduledoc """
  Interns element names ("e", "Al", "Th", ...) as one-byte integer tokens so
  molecules can be handled as compact binaries. Token ids start at 1 and are
  assigned in first-seen order; the name->id map lives in a named Agent.

  NOTE(review): ids are emitted as single bytes, so this assumes fewer than
  256 distinct element names — true for this puzzle's input.
  """
  use Agent

  @doc "Starts the named Agent holding `{last_assigned_id, name => id map}`."
  def start do
    Agent.start_link(fn -> {0, %{}} end, name: __MODULE__)
  end

  @doc """
  Tokenizes `raw` into a binary with one byte per element. An element is
  either "e" or an uppercase letter followed by lowercase letters.
  """
  def tokenize(raw) do
    ~r/[eA-Z][a-z]*/
    |> Regex.scan(raw)
    |> List.flatten()
    |> Enum.map(&intern/1)
    |> :binary.list_to_bin()
  end

  # Fetch-or-assign the token id for one element name in a single atomic
  # Agent round-trip. The original issued a separate Agent.get followed by
  # Agent.update, a non-atomic read-modify-write that could assign duplicate
  # ids if tokenize/1 were ever called from two processes concurrently.
  defp intern(name) do
    Agent.get_and_update(__MODULE__, fn {last, map} = state ->
      case Map.fetch(map, name) do
        {:ok, token} ->
          {token, state}

        :error ->
          token = last + 1
          {token, {token, Map.put(map, name, token)}}
      end
    end)
  end
end
{:ok, _} = Tokenizer.start()

# Parse the input into the replacement rules and the target medicine molecule.
# Rule lines look like "Al => ThF"; the lone non-rule line is the medicine.
# Both sides are tokenized up front so molecules are one-byte-per-element
# binaries.
{replacements, medicine} =
  File.stream!("inputs/day19.txt")
  |> Stream.map(&String.trim/1)
  |> Stream.filter(&(&1 != ""))
  |> Stream.map(fn line ->
    case Regex.run(~r/(\w+) => (\w+)/, line) do
      [_, from, to] ->
        {:replacement, Tokenizer.tokenize(from), Tokenizer.tokenize(to)}

      _ ->
        {:start, Tokenizer.tokenize(line)}
    end
  end)
  |> Enum.reduce({[], nil}, fn
    # Collect rules with an O(1) prepend; sorting happens once below instead
    # of on every insertion as before (which was accidentally O(n^2 log n)).
    {:replacement, from, to}, {replacements, start} -> {[{from, to} | replacements], start}
    {:start, start}, {replacements, _start} -> {replacements, start}
  end)

# Longest right-hand side first: the greedy backwards search in part 2
# relies on trying the biggest reductions before smaller ones.
replacements = Enum.sort_by(replacements, fn {_, to} -> byte_size(to) end, :desc)
defmodule MedicineSearcher do
  @moduledoc """
  Forward and backward search over tokenized molecules (binaries produced by
  `Tokenizer.tokenize/1`, one byte per element). The named Agent holds the
  memoisation cache for `findPathDistance/3`.
  """
  use Agent

  @doc "Starts the named Agent used as the memoisation cache."
  def start do
    Agent.start_link(fn -> %{} end, name: __MODULE__)
  end

  @doc """
  All distinct molecules reachable from `start` by applying exactly one
  replacement `{from, to}`. Assumes every `from` is a single token (one
  byte), which holds for this puzzle's grammar.
  """
  def nextGen(start, replacements) do
    Enum.flat_map(0..(byte_size(start) - 1), fn offset ->
      prefix = binary_part(start, 0, offset)
      # Split off the single token at `offset`; the remainder is kept verbatim.
      <<token, suffix::binary>> = binary_part(start, offset, byte_size(start) - offset)

      for {from, to} <- replacements, from == <<token>> do
        prefix <> to <> suffix
      end
    end)
    |> Enum.uniq()
  end

  @doc """
  All distinct molecules that could have produced `start` in one step, i.e.
  `start` with one occurrence of some rule's `to` replaced back by its
  `from`. Results follow `replacements` order, then left-to-right offset.
  """
  def previousGen(start, replacements) do
    Enum.flat_map(replacements, fn {from, to} ->
      0..(byte_size(start) - 1)
      |> Enum.map(fn offset ->
        {binary_part(start, 0, offset),
         binary_part(start, offset, byte_size(start) - offset)}
      end)
      |> Enum.filter(fn {_prefix, rest} -> String.starts_with?(rest, to) end)
      |> Enum.map(fn {prefix, rest} ->
        suffix = binary_part(rest, byte_size(to), byte_size(rest) - byte_size(to))
        prefix <> from <> suffix
      end)
    end)
    |> Enum.uniq()
  end

  @doc """
  Number of replacement steps from `start` to `medicine`, searching backwards
  (reducing `medicine` toward `start`) with results memoised in the Agent.

  Greedy: `Enum.take(1)` on the lazy stream commits to the first complete
  route found, which — with `replacements` sorted longest-`to` first —
  yields the minimal generation count for this grammar. Returns `:no_route`
  when `medicine` cannot be reduced to `start`.
  """
  def findPathDistance(start, _replacements, medicine) when start == medicine do
    0
  end

  def findPathDistance(start, replacements, medicine) do
    case Agent.get(__MODULE__, &Map.get(&1, medicine)) do
      nil ->
        first_path =
          medicine
          |> previousGen(replacements)
          |> Stream.map(&findPathDistance(start, replacements, &1))
          |> Stream.filter(&(&1 != :no_route))
          |> Stream.map(&(&1 + 1))
          |> Enum.take(1)

        distance =
          case first_path do
            [d] -> d
            [] -> :no_route
          end

        Agent.update(__MODULE__, &Map.put(&1, medicine, distance))
        distance

      cached ->
        cached
    end
  end
end
# Part 1: how many distinct molecules are one replacement away from the medicine.
next_molecules = MedicineSearcher.nextGen(medicine, replacements)
IO.puts("Calibration size: #{length(next_molecules)}")

# Part 2: fewest replacement steps from the electron "e" to the medicine.
electron = Tokenizer.tokenize("e")
IO.puts("Max generations: #{byte_size(medicine)}")
{:ok, _} = MedicineSearcher.start()
generation = MedicineSearcher.findPathDistance(electron, replacements, medicine)
IO.puts("The Medicine generation: #{generation}")
|