#pragma once

template <class LAYER1_TYPE>
class NeuralNetOneLayer
{
	LAYER1_TYPE m_layer1;
	double m_lfLearningRate;

public:
	NeuralNetOneLayer(void);
	~NeuralNetOneLayer(void);

	void SetLearnRate(double lfLearnRate) { m_lfLearningRate = lfLearnRate; }

	// Runs one training step on a single pattern: forward pass, output error
	// (actual - intended, i.e. dErr/dOutput for a sum-of-squares error),
	// error backpropagation, then a weight update. arrLfScratch must hold at
	// least GetNumOutputs() doubles; it is overwritten with the output error.
	bool Train(const double * const arrLfInputs, const double * const arrLfIntendedOutputs, double * arrLfScratch)
	{
		m_layer1.Eval(arrLfInputs);
		const double * arrLfActualOutputs = m_layer1.GetOutputVec();
		for(int i = 0; i < m_layer1.GetNumOutputs(); ++i)
		{
			arrLfScratch[i] = arrLfActualOutputs[i] - arrLfIntendedOutputs[i];
		}
		m_layer1.CalcErrorPartials(arrLfScratch);
		m_layer1.DoBackpropStep(m_lfLearningRate, arrLfInputs);
		return true;
	}

	// Forward pass only; returns a pointer to the layer's output vector.
	const double * const Eval(const double * const arrLfInputs)
	{
		m_layer1.Eval(arrLfInputs);
		return m_layer1.GetOutputVec();
	}

	// LAYER1_TYPE is expected to provide:
	//bool Eval(const double * const arrLfInputs);
	//bool CalcErrorPartials(const double * const arrLfDErrDInputPrev); // gets dErr/dAct and dErr/dInput
	//const double * const GetOutputVec() const { return m_arrLfOutputs; }
	//const double * const GetDErrDAct() const { return m_arrLfDerrorDactivation; } // gets sigmas
};

template <class LAYER1_TYPE>
NeuralNetOneLayer<LAYER1_TYPE>::NeuralNetOneLayer(void) : m_lfLearningRate(0.1)
{
}

template <class LAYER1_TYPE>
NeuralNetOneLayer<LAYER1_TYPE>::~NeuralNetOneLayer(void)
{
}
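
// ---------------------------------------------------------------------------
// Usage sketch (an assumption, not part of the original header). Besides the
// interface noted above, Train() also calls GetNumOutputs() and
// DoBackpropStep(lfLearnRate, arrLfInputs) on LAYER1_TYPE. Given a
// hypothetical layer class "FullyConnectedLayer" meeting that interface,
// training on one pattern might look like:
//
//     NeuralNetOneLayer<FullyConnectedLayer> net;
//     net.SetLearnRate(0.05);
//
//     double arrLfInputs[NUM_INPUTS]   = { /* ... */ };  // one input pattern
//     double arrLfTargets[NUM_OUTPUTS] = { /* ... */ };  // intended outputs
//     double arrLfScratch[NUM_OUTPUTS];                  // caller-owned error buffer
//
//     net.Train(arrLfInputs, arrLfTargets, arrLfScratch);
//     const double * arrLfOut = net.Eval(arrLfInputs);   // forward pass only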