Filled in the remaining TODOs on the tree search
author     Justin Worthe <justin@worthe-it.co.za>
           Thu, 6 Sep 2018 19:20:04 +0000 (21:20 +0200)
committer  Justin Worthe <justin@worthe-it.co.za>
           Thu, 6 Sep 2018 19:20:04 +0000 (21:20 +0200)
src/engine/constants.rs
src/engine/status.rs
src/main.rs
src/strategy/monte_carlo.rs
src/strategy/monte_carlo_tree.rs

diff --git a/src/engine/constants.rs b/src/engine/constants.rs
index 7d260e8..951a756 100644
--- a/src/engine/constants.rs
+++ b/src/engine/constants.rs
@@ -2,6 +2,9 @@ pub const FULL_MAP_WIDTH: u8 = 16;
 pub const SINGLE_MAP_WIDTH: u8 = FULL_MAP_WIDTH/2;
 pub const MAP_HEIGHT: u8 = 8;
 
+pub const MAX_MOVES: u16 = 400;
+pub const INIT_SEED: [u8;16] = [0x7b, 0x6a, 0xe1, 0xf4, 0x41, 0x3c, 0xe9, 0x0f, 0x67, 0x81, 0x67, 0x99, 0x77, 0x0a, 0x6b, 0xda];
+
 pub const MISSILE_COOLDOWN: usize = 3;
 pub const MISSILE_COOLDOWN_STATES: usize = MISSILE_COOLDOWN+1;
 pub const MISSILE_SPEED: usize = 2;
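MAX_MOVES and INIT_SEED move into the shared constants module so both strategy modules can use them. A minimal sketch (not part of this commit, assuming the crate's own engine::constants module and the rand 0.5-era XorShiftRng already used in this diff) of consuming the shared seed:

// Sketch only: building the deterministic rollout RNG from the shared seed.
use engine::constants::INIT_SEED;
use rand::{SeedableRng, XorShiftRng};

fn new_rollout_rng() -> XorShiftRng {
    // A fixed seed keeps simulation runs reproducible between bot invocations.
    XorShiftRng::from_seed(INIT_SEED)
}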
diff --git a/src/engine/status.rs b/src/engine/status.rs
index 1fa7ac0..d6ee4dd 100644
--- a/src/engine/status.rs
+++ b/src/engine/status.rs
@@ -5,3 +5,4 @@ pub enum GameStatus {
     OpponentWon,
     Draw
 }
+
diff --git a/src/main.rs b/src/main.rs
index 45fd356..26d6eac 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -22,7 +22,6 @@ fn write_command(filename: &str, command: Command) -> Result<(), Box<Error> > {
     Ok(())
 }
 
-
 fn main() {
     let start_time = PreciseTime::now();
     let max_time = Duration::milliseconds(MAX_TIME_MILLIS);
@@ -34,6 +33,8 @@ fn main() {
             process::exit(1);
         }
     };
+
+    // TODO: Opening playbook?
     let command = strategy::monte_carlo::choose_move(&state, start_time, max_time);
 
     match write_command(COMMAND_PATH, command) {
diff --git a/src/strategy/monte_carlo.rs b/src/strategy/monte_carlo.rs
index dba735c..56701f5 100644
--- a/src/strategy/monte_carlo.rs
+++ b/src/strategy/monte_carlo.rs
@@ -10,9 +10,6 @@ use rand::{Rng, XorShiftRng, SeedableRng};
 
 use arrayvec::ArrayVec;
 
-const MAX_MOVES: u16 = 400;
-const INIT_SEED: [u8;16] = [0x7b, 0x6a, 0xe1, 0xf4, 0x41, 0x3c, 0xe9, 0x0f, 0x67, 0x81, 0x67, 0x99, 0x77, 0x0a, 0x6b, 0xda];
-
 use time::{Duration, PreciseTime};
 
 #[cfg(not(feature = "single-threaded"))]
@@ -168,7 +165,7 @@ fn simulate_to_endstate<R: Rng>(command_score: &mut CommandScore, state: &Bitwis
 }
 
 #[cfg(feature = "heuristic-random")]
-fn random_move<R: Rng>(player: &Player, opponent: &Player, rng: &mut R) -> Command {
+pub fn random_move<R: Rng>(player: &Player, opponent: &Player, rng: &mut R) -> Command {
     lazy_static! {
         static ref MOVES: [Command; NUMBER_OF_POSSIBLE_MOVES] = {
             let mut m = [Command::Nothing; NUMBER_OF_POSSIBLE_MOVES];
@@ -318,7 +315,7 @@ fn random_move<R: Rng>(player: &Player, opponent: &Player, rng: &mut R) -> Comma
 }
 
 #[cfg(not(feature = "heuristic-random"))]
-fn random_move<R: Rng>(player: &Player, _opponent: &Player, rng: &mut R) -> Command {
+pub fn random_move<R: Rng>(player: &Player, _opponent: &Player, rng: &mut R) -> Command {
     let free_positions_count = player.unoccupied_cell_count();
 
     let open_building_spot = free_positions_count > 0;
diff --git a/src/strategy/monte_carlo_tree.rs b/src/strategy/monte_carlo_tree.rs
index 62cc8e5..4efded8 100644
--- a/src/strategy/monte_carlo_tree.rs
+++ b/src/strategy/monte_carlo_tree.rs
@@ -7,123 +7,216 @@ use engine::geometry::*;
 use rand::{Rng, XorShiftRng, SeedableRng};
 use time::{Duration, PreciseTime};
 
+use strategy::monte_carlo;
 
-enum SearchTree {
-    Leaf(NodeStats),
-    FullyExploredNode(FullyExploredStats),
-    PartiallyExploredNode(PartiallyExploredStats)
-}
+use arrayvec::ArrayVec;
 
+#[derive(Debug)]
 struct NodeStats {
-    wins: u32,
-    attempts: u32
+    wins: f32,
+    losses: f32,
+    attempts: f32,
+    explored: Vec<(Command, NodeStats)>,
+    unexplored: Vec<Command>
 }
 
-struct FullyExploredStats {
-    wins: u32,
-    attempts: u32,
-    explored: Vec<(Command, SearchTree)>
-}
+impl NodeStats {
+    fn create_node(player: &Player) -> NodeStats {
+        let unoccupied_cells_count = player.unoccupied_cell_count();
+        let unoccupied_cells = (0..unoccupied_cells_count)
+            .map(|i| player.location_of_unoccupied_cell(i));
 
-struct PartiallyExploredStats {
-    wins: u32,
-    attempts: u32,
-    explored: Vec<(Command, SearchTree)>,
-    unexplored: Vec<Command>
-}
+        let mut all_buildings: ArrayVec<[BuildingType; NUMBER_OF_BUILDING_TYPES]> = ArrayVec::new();
+        if DEFENCE_PRICE <= player.energy {
+            all_buildings.push(BuildingType::Defence);
+        }
+        if MISSILE_PRICE <= player.energy {
+            all_buildings.push(BuildingType::Attack);
+        }
+        if ENERGY_PRICE <= player.energy {
+            all_buildings.push(BuildingType::Energy);
+        }
+        if TESLA_PRICE <= player.energy && !player.has_max_teslas() {
+            all_buildings.push(BuildingType::Tesla);
+        }
+        
+        let building_command_count = unoccupied_cells.len()*all_buildings.len();
+
+        let mut commands = Vec::with_capacity(building_command_count + 2);
+
+        commands.push(Command::Nothing);
+        if IRON_CURTAIN_PRICE <= player.energy && player.can_build_iron_curtain() {
+            commands.push(Command::IronCurtain);
+        }
 
-impl SearchTree {
-    fn create_node(state: &Player) -> SearchTree {
-        SearchTree::PartiallyExploredNode(PartiallyExploredStats {
-            wins: 0,
-            attempts: 0,
-            explored: Vec::new(),
-            unexplored: Vec::new() //TODO
-        })
+        for position in unoccupied_cells {
+            for &building in &all_buildings {
+                commands.push(Command::Build(position, building));
+            }
+        }
+        
+        NodeStats {
+            wins: 0.,
+            losses: 0.,
+            attempts: 0.,
+            explored: Vec::with_capacity(commands.len()),
+            unexplored: commands
+        }
+    }
+    
+    fn node_with_highest_ucb<'a>(&'a mut self) -> &'a mut (Command, NodeStats) {
+        debug_assert!(self.unexplored.is_empty());
+        let total_attempts = self.explored.iter().map(|(_, n)| n.attempts).sum::<f32>();
+
+        let mut max_position = 0;
+        let mut max_value = self.explored[0].1.ucb(total_attempts);
+        for i in 1..self.explored.len() {
+            let value = self.explored[i].1.ucb(total_attempts);
+            if value > max_value {
+                max_position = i;
+                max_value = value;
+            }
+        }
+        &mut self.explored[max_position]
     }
-}
 
-impl FullyExploredStats {
-    fn node_with_highest_ucb<'a>(&'a mut self) -> &'a mut (Command, SearchTree) {
-        //TODO
-        &mut self.explored[0]
+    fn ucb(&self, n: f32) -> f32 {
+        // UCB1: mean win rate plus an exploration bonus; standard UCB1 takes
+        // the natural log of the parent's total attempt count n
+        self.wins / self.attempts + (2.0 * n.ln() / self.attempts).sqrt()
     }
-}
 
-impl PartiallyExploredStats {
-    fn add_node<'a>(&'a mut self, state: &Player, command: Command) -> &'a mut (Command, SearchTree) {
-        //TODO: Insert
-        let node = SearchTree::create_node(state);
+    fn add_node<'a>(&'a mut self, player: &Player, command: Command) -> &'a mut (Command, NodeStats) {
+        let node = NodeStats::create_node(player);
         self.explored.push((command, node));
+        self.unexplored.retain(|c| *c != command);
         self.explored.last_mut().unwrap()
     }
+
+    fn add_victory(&mut self) {
+        self.attempts += 1.;
+        self.wins += 1.;
+    }
+    fn add_defeat(&mut self) {
+        self.attempts += 1.;
+        self.losses += 1.;
+    }
+    fn add_draw(&mut self) {
+        self.attempts += 1.;
+    }
 }
 
-use self::SearchTree::*;
 pub fn choose_move(state: &BitwiseGameState, start_time: PreciseTime, max_time: Duration) -> Command {
+    let mut rng = XorShiftRng::from_seed(INIT_SEED);
     
-    
-    // create root node as partially explored node
-    // creating a new node needs to populate all (valid) unexplored moves
-
-    let mut root = SearchTree::create_node(&state.player);
+    let mut root = NodeStats::create_node(&state.player);
 
-    loop {
-        // TODO: Break out!
-        tree_search(&state, &mut root);
+    while start_time.to(PreciseTime::now()) < max_time {
+        tree_search(&state, &mut root, &mut rng);
     }
-    
-    Command::Nothing    
-}
 
-// TODO: Max depth
+    let (command, _) = root.node_with_highest_ucb();
+    command.clone()
+}
 
-fn tree_search(state: &BitwiseGameState, tree: &mut SearchTree) -> bool {
-    match tree {
-        Leaf(stats) => {
-            // ???
-            false
-        },
-        FullyExploredNode(ref mut stats) => {
+fn tree_search<R: Rng>(state: &BitwiseGameState, stats: &mut NodeStats, rng: &mut R) -> GameStatus {
+    // the children of this node are the player's moves for this round; the
+    // opponent's reply is picked one level down, in tree_search_opponent
+    
+    if state.round >= MAX_MOVES {
+        return GameStatus::Draw
+    }
+    
+    if stats.unexplored.is_empty() {
+        let result = {
             let (next_command, next_tree) = stats.node_with_highest_ucb();
-            tree_search_opponent(state, next_tree, next_command.clone())
-            // TODO: Back-propagation?
-        },
-        PartiallyExploredNode(ref mut stats) => {
-            let next_command = stats.unexplored[0].clone(); // TODO: Random
-            let next_tree = stats.add_node(&state.opponent, next_command);
-
-            // TODO: simulate to end
-            // TODO: Back-propagate
-            false
-        }
+            tree_search_opponent(state, next_tree, next_command.clone(), rng)
+        };
+        match result {
+            GameStatus::PlayerWon => {stats.add_defeat()},
+            GameStatus::OpponentWon => {stats.add_victory()},
+            _ => {stats.add_draw()}
+        };
+        result
+    } else {
+        let next_command = rng.choose(&stats.unexplored).expect("Partially explored had no options").clone();
+        let result = {
+            let (_, next_stats) = stats.add_node(&state.opponent, next_command);
+
+            let opponent_random = monte_carlo::random_move(&state.opponent, &state.player, rng);
+            let mut next_state = state.clone();
+            next_state.simulate(next_command, opponent_random);
+
+            let result = simulate_to_endstate(next_state, rng);
+            match result {
+                GameStatus::PlayerWon => {next_stats.add_victory()},
+                GameStatus::OpponentWon => {next_stats.add_defeat()},
+                _ => {next_stats.add_draw()}
+            };
+            
+            result
+        };
+
+        match result {
+            GameStatus::PlayerWon => {stats.add_defeat()},
+            GameStatus::OpponentWon => {stats.add_victory()},
+            _ => {stats.add_draw()}
+        };
+        result
     }
 }
 
-fn tree_search_opponent(state: &BitwiseGameState, tree: &mut SearchTree, player_command: Command) -> bool {
-    match tree {
-        Leaf(stats) => {
-            // ???
-            false
-        },
-        FullyExploredNode(ref mut stats) => {
+fn tree_search_opponent<R: Rng>(state: &BitwiseGameState, stats: &mut NodeStats, player_command: Command, rng: &mut R) -> GameStatus {
+    // the children of this node are the opponent's moves for this round;
+    // player_command is the move already picked one level up in tree_search
+
+    if stats.unexplored.is_empty() {
+        let result = {
             let (next_command, next_tree) = stats.node_with_highest_ucb();
             let mut next_state = state.clone();
             next_state.simulate(player_command, next_command.clone());
-            tree_search(&next_state, next_tree)
-            // TODO: Back-propagation?
-        },
-        PartiallyExploredNode(ref mut stats) => {
-            let next_command = stats.unexplored[0].clone(); // TODO: Random
-            
-            let mut next_state = state.clone();
-            next_state.simulate(player_command, next_command.clone());
+            tree_search(&next_state, next_tree, rng)
+        };
+        match result {
+            GameStatus::PlayerWon => {stats.add_victory()},
+            GameStatus::OpponentWon => {stats.add_defeat()},
+            _ => {stats.add_draw()}
+        };
+        result
+    } else {
+        let next_command = rng.choose(&stats.unexplored).expect("Partially explored had no options").clone();
+        let mut next_state = state.clone();
+        next_state.simulate(player_command, next_command);
+
+        let result = {
+            let (_, next_stats) = stats.add_node(&next_state.player, next_command);
+
+            let result = simulate_to_endstate(next_state, rng);
+            match result {
+                GameStatus::PlayerWon => {next_stats.add_defeat()},
+                GameStatus::OpponentWon => {next_stats.add_victory()},
+                _ => {next_stats.add_draw()}
+            };
             
-            let next_tree = stats.add_node(&next_state.player, next_command);
+            result
+        };
+        
+        match result {
+            GameStatus::PlayerWon => {stats.add_victory()},
+            GameStatus::OpponentWon => {stats.add_defeat()},
+            _ => {stats.add_draw()}
+        };
+        result
+    }
+}
 
-            // TODO: simulate to end
-            // TODO: Back-propagate
-            false
-        }
+
+fn simulate_to_endstate<R: Rng>(mut state: BitwiseGameState, rng: &mut R) -> GameStatus  {
+    let mut status = GameStatus::Continue;
+    
+    while status == GameStatus::Continue && state.round < MAX_MOVES {
+        let player_command = monte_carlo::random_move(&state.player, &state.opponent, rng);
+        let opponent_command = monte_carlo::random_move(&state.opponent, &state.player, rng);
+        status = state.simulate(player_command, opponent_command);
     }
+    status
 }
+
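The selection rule in node_with_highest_ucb is UCB1. A standalone sketch with made-up numbers (not code from this commit) showing how the value trades off win rate against visit count, where n is the parent's total number of attempts:

// Standard UCB1 value for one child: exploitation term plus exploration bonus.
fn ucb1(wins: f32, attempts: f32, n: f32) -> f32 {
    wins / attempts + (2.0 * n.ln() / attempts).sqrt()
}

fn main() {
    // Two children of a node visited 20 times in total, 10 visits each:
    let strong = ucb1(7.0, 10.0, 20.0); // 7 wins out of 10
    let weak = ucb1(2.0, 10.0, 20.0);   // 2 wins out of 10
    assert!(strong > weak); // equal visits, so the higher win rate is preferred
    println!("strong = {:.3}, weak = {:.3}", strong, weak);
}

With equal visit counts the exploration term is identical, so the child with the better win rate is chosen; a child with fewer visits gets a larger bonus and is revisited even if its win rate is currently lower.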