diff options
author | Justin Worthe <justin.worthe@gmail.com> | 2015-10-17 17:02:24 +0200 |
---|---|---|
committer | Justin Worthe <justin.worthe@gmail.com> | 2015-10-17 17:02:24 +0200 |
commit | 550caeee11086bd56db69176b3149ddfa160ee30 (patch) | |
tree | c935b819a4ad165fbef567db67ad9ddcca62df04 /src/brain/neural_network.cpp | |
parent | 31a82b0359515ae579514024b22873fb708c3f53 (diff) |
Reverted to a simple decision tree
Turns out it's much easier to write a bot by hand with if statements.
Diffstat (limited to 'src/brain/neural_network.cpp')
-rw-r--r-- | src/brain/neural_network.cpp | 110 |
1 file changed, 64 insertions, 46 deletions
diff --git a/src/brain/neural_network.cpp b/src/brain/neural_network.cpp index 0c23771..c8177e8 100644 --- a/src/brain/neural_network.cpp +++ b/src/brain/neural_network.cpp @@ -12,13 +12,13 @@ NeuralNetwork::NeuralNetwork(std::istream &&networkConfigFile, unsigned int numb for (unsigned int i=0; i<numberOfSensors; ++i) { - auto sensor = std::make_shared<Sensor>(); - _sensors.push_back(sensor); + auto sensor = std::make_shared<Sensor>(i); + _sensors.push_back(sensor); } for (unsigned int i=0; i<numberOfOutputs; ++i) { - auto output = findOrAddNeuron(i); - _outputs.push_back(output); + auto output = findOrAddNeuron(i); + _outputs.push_back(output); } parseFile(std::move(networkConfigFile)); @@ -34,14 +34,14 @@ NeuralNetwork::NeuralNetwork(std::istream &&networkConfigFile, std::vector<bool> for (unsigned int i=0; i<sensorInitialValues.size(); ++i) { - auto sensor = std::make_shared<Sensor>(); - sensor->setActivation(sensorInitialValues[i] ? 1 : 0); - _sensors.push_back(sensor); + auto sensor = std::make_shared<Sensor>(i); + sensor->setActivation(sensorInitialValues[i] ? 
1 : 0); + _sensors.push_back(sensor); } for (unsigned int i=0; i<numberOfOutputs; ++i) { - auto output = findOrAddNeuron(i); - _outputs.push_back(output); + auto output = findOrAddNeuron(i); + _outputs.push_back(output); } parseFile(std::move(networkConfigFile)); @@ -55,29 +55,28 @@ void NeuralNetwork::parseFile(std::istream &&file) unsigned int destId; while (file.get(srcType) && - file >> srcId && - file.ignore(std::numeric_limits<std::streamsize>::max(), 'n') && - file >> destId && - file >> weight && - file.ignore(std::numeric_limits<std::streamsize>::max(), '\n')) + file >> srcId && + file.ignore(std::numeric_limits<std::streamsize>::max(), 'n') && + file >> destId && + file >> weight && + file.ignore(std::numeric_limits<std::streamsize>::max(), '\n')) { + std::shared_ptr<NeuralNode> source; + std::shared_ptr<Neuron> destination; + switch (srcType) + { + case 's': + source = findOrAddSensor(srcId); + break; + case 'b': + source = _biasNode; + break; + default: + source = findOrAddNeuron(srcId); + } + destination = findOrAddNeuron(destId); - std::shared_ptr<NeuralNode> source; - std::shared_ptr<Neuron> destination; - switch (srcType) - { - case 's': - source = findOrAddSensor(srcId); - break; - case 'b': - source = _biasNode; - break; - default: - source = findOrAddNeuron(srcId); - } - destination = findOrAddNeuron(destId); - - addLink(source, destination, weight); + addLink(source, destination, weight); } } @@ -92,8 +91,8 @@ std::shared_ptr<Sensor> NeuralNetwork::findOrAddSensor(unsigned int id) { while (_sensors.size() <= id) { - auto sensor = std::make_shared<Sensor>(); - _sensors.push_back(sensor); + auto sensor = std::make_shared<Sensor>(_sensors.size()); + _sensors.push_back(sensor); } return _sensors.at(id); @@ -103,8 +102,8 @@ std::shared_ptr<Neuron> NeuralNetwork::findOrAddNeuron(unsigned int id) { while (_neurons.size() <= id) { - auto neuron = std::make_shared<Neuron>(); - _neurons.push_back(neuron); + auto neuron = 
std::make_shared<Neuron>(_neurons.size()); + _neurons.push_back(neuron); } return _neurons.at(id); @@ -121,12 +120,12 @@ unsigned int NeuralNetwork::findMaxOutputIndex() const auto maxIterations = _neurons.size()*10; for (unsigned int iteration=0; anyNodeChanged && iteration<maxIterations; ++iteration) { - anyNodeChanged = false; - for (auto const& neuron : _neurons) - { - bool activationChanged = neuron->calculateActivation(); - anyNodeChanged = anyNodeChanged || activationChanged; - } + anyNodeChanged = false; + for (auto const& neuron : _neurons) + { + bool activationChanged = neuron->calculateActivation(); + anyNodeChanged = anyNodeChanged || activationChanged; + } } int currentMaxIndex = 0; @@ -134,12 +133,31 @@ unsigned int NeuralNetwork::findMaxOutputIndex() const for (unsigned int i=1; i<_outputs.size(); ++i) { - double activation = _outputs.at(i)->activation(); - if (activation >= currentMaxActivation) - { - currentMaxActivation = activation; - currentMaxIndex = i; - } + double activation = _outputs.at(i)->activation(); + if (activation >= currentMaxActivation) + { + currentMaxActivation = activation; + currentMaxIndex = i; + } } return currentMaxIndex; } + +bool NeuralNetwork::linkExists(std::string srcIdentifier, std::string destIdentifier, double weight) const +{ + std::shared_ptr<Neuron> dest; + + for (auto const& node : _neurons) + { + if (node->identifier() == destIdentifier) + { + dest = node; + } + } + + if (!dest) + { + return false; + } + return dest->hasInputWithWeight(srcIdentifier, weight); +} |