Filled in high-level outline of the Monte Carlo search tree
[entelect-challenge-tower-defence.git] / src / strategy / monte_carlo_tree.rs
1 use engine::command::*;
2 use engine::status::GameStatus;
3 use engine::bitwise_engine::{Player, BitwiseGameState};
4 use engine::constants::*;
5 use engine::geometry::*;
6
7 use rand::{Rng, XorShiftRng, SeedableRng};
8 use time::{Duration, PreciseTime};
9
10
/// A node in the Monte Carlo search tree. The variant records how far the
/// node's children have been expanded.
enum SearchTree {
    /// Terminal node: only win/attempt counters are kept.
    Leaf(NodeStats),
    /// Node whose candidate commands all have a child subtree already.
    FullyExploredNode(FullyExploredStats),
    /// Node with commands still waiting in `unexplored` to be expanded.
    PartiallyExploredNode(PartiallyExploredStats)
}
16
/// Bare win/attempt counters for a leaf node.
/// NOTE(review): nothing in this file increments these yet — back-propagation
/// is still a TODO in `tree_search`.
struct NodeStats {
    wins: u32,     // playouts through this node that counted as wins
    attempts: u32  // total playouts through this node
}
21
/// Counters plus children for a node whose every candidate command has been
/// expanded into a subtree.
struct FullyExploredStats {
    wins: u32,     // playouts through this node that counted as wins
    attempts: u32, // total playouts through this node
    /// One (command, subtree) pair per expanded child.
    explored: Vec<(Command, SearchTree)>
}
27
/// Counters plus children for a node that still has unexpanded commands.
struct PartiallyExploredStats {
    wins: u32,     // playouts through this node that counted as wins
    attempts: u32, // total playouts through this node
    /// Children expanded so far.
    explored: Vec<(Command, SearchTree)>,
    /// Candidate commands not yet given a child subtree.
    /// NOTE(review): never populated yet — see the TODO in `create_node`;
    /// indexing `unexplored[0]` in the search functions will panic until then.
    unexplored: Vec<Command>
}
34
35 impl SearchTree {
36     fn create_node(state: &Player) -> SearchTree {
37         SearchTree::PartiallyExploredNode(PartiallyExploredStats {
38             wins: 0,
39             attempts: 0,
40             explored: Vec::new(),
41             unexplored: Vec::new() //TODO
42         })
43     }
44 }
45
46 impl FullyExploredStats {
47     fn node_with_highest_ucb<'a>(&'a mut self) -> &'a mut (Command, SearchTree) {
48         //TODO
49         &mut self.explored[0]
50     }
51 }
52
53 impl PartiallyExploredStats {
54     fn add_node<'a>(&'a mut self, state: &Player, command: Command) -> &'a mut (Command, SearchTree) {
55         //TODO: Insert
56         let node = SearchTree::create_node(state);
57         self.explored.push((command, node));
58         self.explored.last_mut().unwrap()
59     }
60 }
61
62 use self::SearchTree::*;
63 pub fn choose_move(state: &BitwiseGameState, start_time: PreciseTime, max_time: Duration) -> Command {
64     
65     
66     // create root node as partially explored node
67     // creating a new node needs to populate all (valid) unexplored moves
68
69     let mut root = SearchTree::create_node(&state.player);
70
71     loop {
72         // TODO: Break out!
73         tree_search(&state, &mut root);
74     }
75     
76     Command::Nothing    
77 }
78
79 // TODO: Max depth
80
/// One MCTS iteration from a node where it is our player's turn to choose a
/// command. Returns whether the playout ended in a win for us.
///
/// NOTE(review): simulation and back-propagation are still TODO, so every
/// path currently returns `false` and no counters are updated.
fn tree_search(state: &BitwiseGameState, tree: &mut SearchTree) -> bool {
    match tree {
        Leaf(stats) => {
            // ???
            // Terminal node: stats are neither consulted nor updated yet.
            false
        },
        FullyExploredNode(ref mut stats) => {
            // Selection: descend into the best child. The chosen player
            // command is carried down and only simulated at the opponent
            // level, where both commands are known.
            let (next_command, next_tree) = stats.node_with_highest_ucb();
            tree_search_opponent(state, next_tree, next_command.clone())
            // TODO: Back-propagation?
        },
        PartiallyExploredNode(ref mut stats) => {
            // Expansion: take an untried player command and create its child.
            // Passes &state.opponent — presumably so the new child enumerates
            // the opponent's replies; confirm once create_node fills moves in.
            let next_command = stats.unexplored[0].clone(); // TODO: Random
            let next_tree = stats.add_node(&state.opponent, next_command);

            // TODO: simulate to end
            // TODO: Back-propagate
            false
        }
    }
}
102
/// Counterpart of `tree_search` for the opponent's half of the turn: the
/// player's command was fixed one level up (`player_command`), so once the
/// opponent's command is chosen here both are simulated together.
///
/// NOTE(review): like `tree_search`, playout simulation and back-propagation
/// are still TODO, so every path currently returns `false`.
fn tree_search_opponent(state: &BitwiseGameState, tree: &mut SearchTree, player_command: Command) -> bool {
    match tree {
        Leaf(stats) => {
            // ???
            // Terminal node: stats are neither consulted nor updated yet.
            false
        },
        FullyExploredNode(ref mut stats) => {
            // Selection: pick the opponent's best child, then advance the
            // game one full turn with both commands and recurse into the
            // player's level on the new state.
            let (next_command, next_tree) = stats.node_with_highest_ucb();
            let mut next_state = state.clone();
            next_state.simulate(player_command, next_command.clone());
            tree_search(&next_state, next_tree)
            // TODO: Back-propagation?
        },
        PartiallyExploredNode(ref mut stats) => {
            // Expansion: take an untried opponent command, simulate the full
            // turn, then create the child from the resulting position.
            let next_command = stats.unexplored[0].clone(); // TODO: Random
            
            let mut next_state = state.clone();
            next_state.simulate(player_command, next_command.clone());
            
            // &next_state.player — the child is the player's decision level
            // in the post-simulation state.
            let next_tree = stats.add_node(&next_state.player, next_command);

            // TODO: simulate to end
            // TODO: Back-propagate
            false
        }
    }
}