#pragma once

#include <stdio.h>

/// Fixed-depth (four-layer) feed-forward neural network built from four
/// layer objects supplied as template parameters.
///
/// Each LAYERn_TYPE must provide:
///   void          Eval(const double *inputs);
///   const double *GetOutputVec() const;
///   int           GetNumOutputs() const;
///   void          CalcErrorPartials(const double *dErrDOutput);
///   const double *GetDerrDinput() const;
///   void          DoBackpropStep(double learnRate, const double *layerInputs);
///   void          Serialize(FILE *fiOut);
///   void          DeSerialize(FILE *fiIn);
///
/// Adjacent layers must have compatible sizes: layer N+1's input count must
/// equal layer N's output count.
template <class LAYER1_TYPE, class LAYER2_TYPE, class LAYER3_TYPE, class LAYER4_TYPE>
class NeuralNetFourLayers
{
    LAYER1_TYPE m_layer1;
    LAYER2_TYPE m_layer2;
    LAYER3_TYPE m_layer3;
    LAYER4_TYPE m_layer4;
    double m_lfLearningRate;  // step size passed to DoBackpropStep (ctor default: 0.1)

public:
    NeuralNetFourLayers(void);
    ~NeuralNetFourLayers(void);

    /// Sets the learning rate used by subsequent Train() calls.
    void SetLearnRate(double lfLearnRate)
    {
        m_lfLearningRate = lfLearnRate;
    }

    /// Runs one forward pass on arrLfInputs, then one backpropagation step
    /// toward arrLfIntendedOutputs.
    ///
    /// @param arrLfInputs           network input vector
    /// @param arrLfIntendedOutputs  target output vector, m_layer4.GetNumOutputs() long
    /// @param arrLfScratch          caller-supplied buffer of at least
    ///                              m_layer4.GetNumOutputs() doubles; on return
    ///                              holds the output error (actual - intended)
    /// @return always true
    bool Train(const double * const arrLfInputs,
               const double * const arrLfIntendedOutputs,
               double * arrLfScratch)
    {
        // Forward pass: each layer consumes the previous layer's output vector.
        m_layer1.Eval(arrLfInputs);
        m_layer2.Eval(m_layer1.GetOutputVec());
        m_layer3.Eval(m_layer2.GetOutputVec());
        m_layer4.Eval(m_layer3.GetOutputVec());

        // Output-error vector (dErr/dOutput for a squared-error objective):
        // actual minus intended, one entry per network output.
        const double * arrLfActualOutputs = m_layer4.GetOutputVec();
        for (int i = 0; i < m_layer4.GetNumOutputs(); ++i)
        {
            arrLfScratch[i] = arrLfActualOutputs[i] - arrLfIntendedOutputs[i];
        }

        // Backward pass: propagate error partials from the output layer back
        // to the input layer; each layer exposes dErr/dInput for its predecessor.
        m_layer4.CalcErrorPartials(arrLfScratch);
        m_layer3.CalcErrorPartials(m_layer4.GetDerrDinput());
        m_layer2.CalcErrorPartials(m_layer3.GetDerrDinput());
        m_layer1.CalcErrorPartials(m_layer2.GetDerrDinput());

        // Weight update: each layer steps using the same inputs it saw in Eval.
        m_layer4.DoBackpropStep(m_lfLearningRate, m_layer3.GetOutputVec());
        m_layer3.DoBackpropStep(m_lfLearningRate, m_layer2.GetOutputVec());
        m_layer2.DoBackpropStep(m_lfLearningRate, m_layer1.GetOutputVec());
        m_layer1.DoBackpropStep(m_lfLearningRate, arrLfInputs);

        return true;
    }

    /// Forward pass only (no weight update).
    ///
    /// @param arrLfInputs  network input vector
    /// @return pointer to layer 4's output vector — owned by m_layer4 and
    ///         only valid until the next Eval()/Train() call
    const double * const Eval(const double * const arrLfInputs)
    {
        m_layer1.Eval(arrLfInputs);
        m_layer2.Eval(m_layer1.GetOutputVec());
        m_layer3.Eval(m_layer2.GetOutputVec());
        m_layer4.Eval(m_layer3.GetOutputVec());
        return m_layer4.GetOutputVec();
    }

    /// Writes all four layers to fiOut, in layer order (1 through 4).
    void Serialize(FILE * fiOut)
    {
        m_layer1.Serialize(fiOut);
        m_layer2.Serialize(fiOut);
        m_layer3.Serialize(fiOut);
        m_layer4.Serialize(fiOut);
    }

    /// Reads all four layers from fiIn; stream must have been written by
    /// Serialize() with identically-sized layers.
    void DeSerialize(FILE * fiIn)
    {
        m_layer1.DeSerialize(fiIn);
        m_layer2.DeSerialize(fiIn);
        m_layer3.DeSerialize(fiIn);
        m_layer4.DeSerialize(fiIn);
    }
};

template <class LAYER1_TYPE, class LAYER2_TYPE, class LAYER3_TYPE, class LAYER4_TYPE>
NeuralNetFourLayers<LAYER1_TYPE, LAYER2_TYPE, LAYER3_TYPE, LAYER4_TYPE>::NeuralNetFourLayers(void)
    : m_lfLearningRate(0.1)
{
}

template <class LAYER1_TYPE, class LAYER2_TYPE, class LAYER3_TYPE, class LAYER4_TYPE>
NeuralNetFourLayers<LAYER1_TYPE, LAYER2_TYPE, LAYER3_TYPE, LAYER4_TYPE>::~NeuralNetFourLayers(void)
{
}