#pragma once
#include <cstdio>

/// A simple two-layer (hidden + output) feed-forward neural network.
///
/// LAYER1_TYPE / LAYER2_TYPE are layer classes expected to provide:
///   Eval(const double*), GetOutputVec(), GetNumOutputs(),
///   CalcErrorPartials(const double*), GetDerrDinput(),
///   DoBackpropStep(double, const double*), Serialize(FILE*), DeSerialize(FILE*).
/// Layer dimensions must agree: layer1's output width is layer2's input width.
template<typename LAYER1_TYPE, typename LAYER2_TYPE>
class NeuralNetTwoLayers
{
    LAYER1_TYPE m_layer1;       // hidden layer (fed the raw inputs)
    LAYER2_TYPE m_layer2;       // output layer (fed layer1's outputs)
    double m_lfLearningRate;    // step size used by DoBackpropStep

public:
    NeuralNetTwoLayers(void);
    ~NeuralNetTwoLayers(void);

    /// Sets the learning rate used by subsequent Train() calls.
    void SetLearnRate(double lfLearnRate) { m_lfLearningRate = lfLearnRate; }

    /// Runs one training step (forward pass + backprop weight update).
    /// @param arrLfInputs          network inputs (layer1 input width)
    /// @param arrLfIntendedOutputs target outputs (layer2 output width)
    /// @param arrLfScratch         caller-supplied buffer, at least
    ///                             m_layer2.GetNumOutputs() doubles
    /// @return true (always; kept for interface compatibility)
    bool Train(const double * const arrLfInputs, const double * const arrLfIntendedOutputs, double * arrLfScratch)
    {
        // Forward pass.
        m_layer1.Eval(arrLfInputs);
        m_layer2.Eval(m_layer1.GetOutputVec());

        // Output error: dE/dOut for squared error is (actual - intended).
        const double * arrLfActualOutputs = m_layer2.GetOutputVec();
        for(int i = 0; i < m_layer2.GetNumOutputs(); ++i)
        {
            arrLfScratch[i] = arrLfActualOutputs[i] - arrLfIntendedOutputs[i];
        }

        // Backward pass: propagate error partials, then update weights.
        // Partials must be computed for BOTH layers before either layer's
        // weights change, so layer1 sees layer2's pre-update derivatives.
        m_layer2.CalcErrorPartials(arrLfScratch);
        m_layer1.CalcErrorPartials(m_layer2.GetDerrDinput());
        m_layer2.DoBackpropStep(m_lfLearningRate, m_layer1.GetOutputVec());
        m_layer1.DoBackpropStep(m_lfLearningRate, arrLfInputs);

        //#if _DEBUG
        //  m_layer1.Eval(arrLfInputs);
        //  m_layer2.Eval(m_layer1.GetOutputVec());
        //#endif
        return true;
    }

    /// Forward pass only; returns a pointer to layer2's output vector
    /// (owned by the network — valid until the next Eval/Train call).
    const double * const Eval(const double * const arrLfInputs)
    {
        m_layer1.Eval(arrLfInputs);
        m_layer2.Eval(m_layer1.GetOutputVec());
        return m_layer2.GetOutputVec();
    }

    /// Writes both layers to fiOut (layer1 first).
    void Serialize(FILE * fiOut)
    {
        m_layer1.Serialize(fiOut);
        m_layer2.Serialize(fiOut);
    }

    /// Reads both layers from fiIn (layer1 first — must match Serialize order).
    void DeSerialize(FILE * fiIn)
    {
        m_layer1.DeSerialize(fiIn);
        m_layer2.DeSerialize(fiIn);
    }

    /// Computes d(output[nWhichOut])/d(input[i]) for all inputs by seeding
    /// the backward pass with a one-hot vector at output nWhichOut.
    /// Does NOT modify any weights.
    /// @param arrLfScratch buffer of at least m_layer2.GetNumOutputs() doubles
    /// @return layer1's dErr/dInput vector (owned by the network)
    const double * const GetPartialDerivatives(const double * const arrLfInputs, int nWhichOut, double * arrLfScratch)
    {
        m_layer1.Eval(arrLfInputs);
        m_layer2.Eval(m_layer1.GetOutputVec());

        // BUGFIX: was "m_layer3.GetNumOutputs()" — there is no m_layer3;
        // the scratch vector spans the OUTPUT layer (m_layer2).
        for(int i = 0; i < m_layer2.GetNumOutputs(); ++i)
        {
            arrLfScratch[i] = 0.0;
        }
        arrLfScratch[nWhichOut] = 1.0;

        m_layer2.CalcErrorPartials(arrLfScratch);
        m_layer1.CalcErrorPartials(m_layer2.GetDerrDinput());
        return m_layer1.GetDerrDinput();
    }
};

template<typename LAYER1_TYPE, typename LAYER2_TYPE>
NeuralNetTwoLayers<LAYER1_TYPE, LAYER2_TYPE>::NeuralNetTwoLayers(void)
    : m_lfLearningRate(0.1)     // default learning rate; override via SetLearnRate
{
}

template<typename LAYER1_TYPE, typename LAYER2_TYPE>
NeuralNetTwoLayers<LAYER1_TYPE, LAYER2_TYPE>::~NeuralNetTwoLayers(void)
{
}