-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathlogic_neural_network.cpp
98 lines (76 loc) · 2.37 KB
/
logic_neural_network.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
#include "logic_neural_network.h"
#include <fstream>
#include <cassert>
// Constructs a logic network with the given layer topology.
// @param topology       neuron counts per layer, input layer first, output layer last
// @param data_file_name path of the whitespace-separated training-data file
// @param network_name   human-readable label for this network instance
//
// All members are set in the member-initializer list (the strings were
// previously default-constructed and then assigned in the body).
LogicNeuralNetwork::LogicNeuralNetwork(const std::vector<size_t>& topology, const std::string& data_file_name, const std::string& network_name)
    : NeuralNet(topology),
      data_file_name_(data_file_name),
      network_name_(network_name),
      testing_correct_(0),
      testing_incorrect_(0),
      is_print_testing_info_(false)
{
}
// Returns the path of the training-data file this network reads from.
const std::string& LogicNeuralNetwork::GetDataFileName() const
{
return data_file_name_;
}
// Returns the human-readable label assigned to this network.
const std::string& LogicNeuralNetwork::GetNetworkName() const
{
return network_name_;
}
// Returns how many samples were scored as correct during TrainNetwork().
// NOTE(review): the counter accumulates across repeated TrainNetwork() calls
// (it is never reset there) — confirm that is intended.
size_t LogicNeuralNetwork::GetTestingCorrect() const
{
return testing_correct_;
}
// Returns how many samples were scored as incorrect during TrainNetwork().
// NOTE(review): accumulates across repeated TrainNetwork() calls — confirm intended.
size_t LogicNeuralNetwork::GetTestingIncorrect() const
{
return testing_incorrect_;
}
// Sets the path of the training-data file used by TrainNetwork().
void LogicNeuralNetwork::SetDataFileName(const std::string& data_file_name)
{
data_file_name_ = data_file_name;
}
// Sets the human-readable label for this network.
void LogicNeuralNetwork::SetNetworkName(const std::string& network_name)
{
network_name_ = network_name;
}
// Trains the network on every sample in data_file_name_.
//
// Each sample consists of topology.front() input values followed by
// topology.back() target values, all whitespace-separated doubles. For
// every sample the network is fed forward and back-propagated; the first
// output is thresholded at 0.5 to a hard 0/1 decision and compared against
// the first target, updating testing_correct_ / testing_incorrect_.
//
// Bug fix: the previous loop ran `while (input.good())`, which stays true
// after the final successful read. The loop then entered once more, every
// extraction failed, the vectors were refilled with the stale last value,
// and the network was trained and scored on a garbage sample. Extraction
// is now checked per token and the loop exits cleanly on EOF or a
// malformed token.
//
// @return false if the data file could not be opened, true otherwise.
bool LogicNeuralNetwork::TrainNetwork()
{
    std::vector<size_t>& topology = GetTopologyRef();
    std::vector<double> input_vals;
    std::vector<double> target_vals;
    std::vector<double> result_vals;
    input_vals.reserve(topology.front());
    target_vals.reserve(topology.back());
    std::ifstream input(data_file_name_);
    if (!input.is_open()) return false;
    double read_data;
    //training body loop: one iteration per sample in the file
    for (;;) {
        input_vals.clear();
        target_vals.clear();
        bool sample_ok = true;
        //read the input values for one sample; stop on EOF/bad token
        for (size_t input_index = 0; sample_ok && input_index < topology.front(); ++input_index) {
            if (input >> read_data) {
                input_vals.push_back(read_data);
            }
            else {
                sample_ok = false;
            }
        }
        //read the target values for one sample; stop on EOF/bad token
        for (size_t output_index = 0; sample_ok && output_index < topology.back(); ++output_index) {
            if (input >> read_data) {
                target_vals.push_back(read_data);
            }
            else {
                sample_ok = false;
            }
        }
        //a short/failed read means end of data — do not train on it
        if (!sample_ok) break;
        //TODO: add in a flag for printing the testing data while it is ran
        //if(is_print_testing_info_)
        FeedForward(input_vals);
        BackPropagate(target_vals);
        result_vals = GetResults();
        //threshold the first output to a hard 0/1 decision
        result_vals.front() = (result_vals[0] < 0.5) ? 0 : 1;
        if (result_vals.front() == target_vals.front()) {
            ++testing_correct_;
        }
        else {
            ++testing_incorrect_;
        }
    }
    return true;
}