// bpnetwork.cpp from the BPNNDemoQt repository (Gitee: qt-open-source-collection, forked from ZzqiZQute/BPNNDemoQt); committed 2018-11-21 by ZzqiZQute.
#include "bpnetwork.h"
#include <stdarg.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>
BPNetwork::BPNetwork(int inputNodeCnt,int outputNodeCnt)
{
srand(static_cast<unsigned int>(time(nullptr)));
mInputNodeCnt=inputNodeCnt;
mOutputNodeCnt=outputNodeCnt;
mHiddenNodeCnt=nullptr;
mHiddenIn=nullptr;
mHiddenOut=nullptr;
mHiddenTmp=nullptr;
mBias=nullptr;
mHiddenLayerCnt=0;
mTrainCnt=0;
mSampleDim=0;//no samples until setSampleDim() is called
mLearnRate=0.0;//avoid reading an uninitialized value; setLearnRate() must be called before training
mWeight=nullptr;
mInputTmp=new double[inputNodeCnt];
mOutputTmp=new double[outputNodeCnt];
mInput=new double*[inputNodeCnt];
for(int i=0;i<inputNodeCnt;i++)
mInput[i]=nullptr;
mOutput=new double*[outputNodeCnt];
for(int i=0;i<outputNodeCnt;i++)
mOutput[i]=nullptr;
mOutputIn=new double[mOutputNodeCnt];
mActualOutput=new double[mOutputNodeCnt];
mErr=new double[mOutputNodeCnt];
mTestOutput=new double[mOutputNodeCnt];
}
BPNetwork::~BPNetwork()
{
if(mHiddenLayerCnt!=0)
{
if(mWeight!=nullptr)
{
for(int i=0;i<mHiddenLayerCnt+1;i++)
{
if(i==0)
{
for(int j=0;j<mInputNodeCnt;j++)
{
delete[] mWeight[i][j];
}
}
else{
for(int j=0;j<mHiddenNodeCnt[i-1];j++)
{
delete[] mWeight[i][j];
}
}
delete[] mWeight[i];
}
delete[] mWeight;
}
for(int i=0;i<mHiddenLayerCnt;i++)
{
delete[] mHiddenIn[i];
delete[] mHiddenOut[i];
delete[] mHiddenTmp[i];
delete[] mBias[i];
}
delete[] mBias[mHiddenLayerCnt];
delete[] mHiddenIn;
delete[] mHiddenOut;
delete[] mHiddenTmp;
delete[] mBias;
delete[] mHiddenNodeCnt;
}
delete[] mInputTmp;
delete[] mOutputTmp;
for(int i=0;i<mInputNodeCnt;i++)
if(mInput[i]!=nullptr)
delete[] mInput[i];
delete[] mInput;
for(int i=0;i<mOutputNodeCnt;i++)
if(mOutput[i]!=nullptr)
delete[] mOutput[i];
delete[] mOutput;
delete[] mOutputIn;
delete[] mActualOutput;
delete[] mErr;
delete[] mTestOutput;
}
int BPNetwork::inputNodeCnt() const
{
return mInputNodeCnt;
}
void BPNetwork::setInputNodeCnt(int inputNodeCnt)
{
mInputNodeCnt = inputNodeCnt;
}
int BPNetwork::outputNodeCnt() const
{
return mOutputNodeCnt;
}
void BPNetwork::setOutputNodeCnt(int outputNodeCnt)
{
mOutputNodeCnt = outputNodeCnt;
}
int BPNetwork::hiddenLayerCnt() const
{
return mHiddenLayerCnt;
}
void BPNetwork::setHiddenLayer(int hiddenLayerCnt,...)
{
int oriCnt=mHiddenLayerCnt;
//free the old weight matrices first, while mHiddenNodeCnt still holds the old layer sizes
if(mWeight!=nullptr)
{
for(int i=0;i<oriCnt+1;i++)
{
if(i==0)
{
for(int j=0;j<mInputNodeCnt;j++)
{
delete[] mWeight[i][j];
}
}
else{
for(int j=0;j<mHiddenNodeCnt[i-1];j++)
{
delete[] mWeight[i][j];
}
}
delete[] mWeight[i];
}
delete[] mWeight;
}
mHiddenLayerCnt = hiddenLayerCnt;
if(mHiddenNodeCnt!=nullptr){
delete[] mHiddenNodeCnt;
mHiddenNodeCnt=nullptr;
}
mHiddenNodeCnt=new int[mHiddenLayerCnt];
va_list li;
va_start(li,hiddenLayerCnt);
for(int i=0;i<hiddenLayerCnt;i++){
mHiddenNodeCnt[i]=va_arg(li,int);
}
va_end(li);
mWeight=new double**[mHiddenLayerCnt+1];
mWeight[0]=new double*[mInputNodeCnt];
for(int i=0;i<mInputNodeCnt;i++)
{
mWeight[0][i]=new double[mHiddenNodeCnt[0]];
}
for(int k=1;k<mHiddenLayerCnt;k++)
{
mWeight[k]=new double*[mHiddenNodeCnt[k-1]];
for(int i=0;i<mHiddenNodeCnt[k-1];i++)
{
mWeight[k][i]=new double[mHiddenNodeCnt[k]];
}
}
mWeight[mHiddenLayerCnt]=new double*[mHiddenNodeCnt[mHiddenLayerCnt-1]];
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
mWeight[mHiddenLayerCnt][i]=new double[mOutputNodeCnt];
}
if(mHiddenIn!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenIn[i];
}
delete[] mHiddenIn;
}
if(mHiddenOut!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenOut[i];
}
delete[] mHiddenOut;
}
if(mHiddenTmp!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenTmp[i];
}
delete[] mHiddenTmp;
}
if(mBias!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mBias[i];
}
delete[] mBias[oriCnt];//the output-layer bias row was allocated too and must be freed
delete[] mBias;
}
mHiddenIn=new double*[mHiddenLayerCnt];
mHiddenOut=new double*[mHiddenLayerCnt];
mHiddenTmp=new double*[mHiddenLayerCnt];
mBias=new double*[mHiddenLayerCnt+1];
for(int i=0;i<mHiddenLayerCnt;i++)
{
mHiddenIn[i]=new double[mHiddenNodeCnt[i]];
mHiddenOut[i]=new double[mHiddenNodeCnt[i]];
mHiddenTmp[i]=new double[mHiddenNodeCnt[i]];
mBias[i]=new double[mHiddenNodeCnt[i]];
}
mBias[mHiddenLayerCnt]=new double[mOutputNodeCnt];
}
void BPNetwork::setHiddenLayer(int hiddenLayerCnt, int value[])
{
int oriCnt=mHiddenLayerCnt;
//free the old weight matrices first, while mHiddenNodeCnt still holds the old layer sizes
if(mWeight!=nullptr)
{
for(int i=0;i<oriCnt+1;i++)
{
if(i==0)
{
for(int j=0;j<mInputNodeCnt;j++)
{
delete[] mWeight[i][j];
}
}
else{
for(int j=0;j<mHiddenNodeCnt[i-1];j++)
{
delete[] mWeight[i][j];
}
}
delete[] mWeight[i];
}
delete[] mWeight;
}
mHiddenLayerCnt = hiddenLayerCnt;
if(mHiddenNodeCnt!=nullptr){
delete[] mHiddenNodeCnt;
mHiddenNodeCnt=nullptr;
}
mHiddenNodeCnt=new int[mHiddenLayerCnt];
for(int i=0;i<hiddenLayerCnt;i++){
mHiddenNodeCnt[i]=value[i];
}
mWeight=new double**[mHiddenLayerCnt+1];
mWeight[0]=new double*[mInputNodeCnt];
for(int i=0;i<mInputNodeCnt;i++)
{
mWeight[0][i]=new double[mHiddenNodeCnt[0]];
}
for(int k=1;k<mHiddenLayerCnt;k++)
{
mWeight[k]=new double*[mHiddenNodeCnt[k-1]];
for(int i=0;i<mHiddenNodeCnt[k-1];i++)
{
mWeight[k][i]=new double[mHiddenNodeCnt[k]];
}
}
mWeight[mHiddenLayerCnt]=new double*[mHiddenNodeCnt[mHiddenLayerCnt-1]];
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
mWeight[mHiddenLayerCnt][i]=new double[mOutputNodeCnt];
}
if(mHiddenIn!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenIn[i];
}
delete[] mHiddenIn;
}
if(mHiddenOut!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenOut[i];
}
delete[] mHiddenOut;
}
if(mHiddenTmp!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mHiddenTmp[i];
}
delete[] mHiddenTmp;
}
if(mBias!=nullptr)
{
for(int i=0;i<oriCnt;i++)
{
delete[] mBias[i];
}
delete[] mBias[oriCnt];//the output-layer bias row was allocated too and must be freed
delete[] mBias;
}
mHiddenIn=new double*[mHiddenLayerCnt];
mHiddenOut=new double*[mHiddenLayerCnt];
mHiddenTmp=new double*[mHiddenLayerCnt];
mBias=new double*[mHiddenLayerCnt+1];
for(int i=0;i<mHiddenLayerCnt;i++)
{
mHiddenIn[i]=new double[mHiddenNodeCnt[i]];
mHiddenOut[i]=new double[mHiddenNodeCnt[i]];
mHiddenTmp[i]=new double[mHiddenNodeCnt[i]];
mBias[i]=new double[mHiddenNodeCnt[i]];
}
mBias[mHiddenLayerCnt]=new double[mOutputNodeCnt];
}
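//Usage note for the two setHiddenLayer overloads above (illustrative; "net" is just a
//placeholder object name): a topology with two hidden layers of 8 and 4 nodes could be
//configured either variadically,
//    net.setHiddenLayer(2, 8, 4);
//or with the array overload,
//    int sizes[2] = {8, 4};
//    net.setHiddenLayer(2, sizes);
//Both overloads rebuild mWeight, mHiddenIn/mHiddenOut/mHiddenTmp and mBias for the new
//topology but do not assign weight values; randWeight() must still be called afterwards.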
void BPNetwork::setSampleDim(int sampleDim)
{
mSampleDim = sampleDim;
}
void BPNetwork::setInput(int n, double* input)
{
if(n<mInputNodeCnt)
{
if(mInput[n]!=nullptr)
{
delete[] mInput[n];
mInput[n]=nullptr;
}
mInput[n]=input;
}else
{
exception("Input count error!");
}
}
void BPNetwork::setOutput(int n, double* output)
{
if(n<mOutputNodeCnt)
{
if(mOutput[n]!=nullptr)
{
delete[] mOutput[n];
mOutput[n]=nullptr;
}
mOutput[n]=output;
}else
{
exception("Output count error!");
}
}
void BPNetwork::reset()
{
mTrainCnt=0;
}
double BPNetwork::getLearnRate() const
{
return mLearnRate;
}
void BPNetwork::setLearnRate(double learnRate)
{
mLearnRate = learnRate;
}
int BPNetwork::getTrainCnt() const
{
return mTrainCnt;
}
void BPNetwork::resetTrainCnt()
{
mTrainCnt = 0;
}
double BPNetwork::activeFunc(double value,int dir)
{
if(dir==0)
{
return 1.0/(1.0+exp(-value));
}else
{
return value*(1.0-value);
}
}
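//Math note on activeFunc: with dir==0 it evaluates the logistic sigmoid
//    s(x) = 1 / (1 + exp(-x)),
//and with dir==1 it evaluates the derivative expressed through the activation itself,
//    s'(x) = s(x) * (1 - s(x)),
//so callers are expected to pass an already-activated value for dir==1 (as bp() does
//with mActualOutput and mHiddenOut), not the raw weighted sum.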
void BPNetwork::randWeight()
{
for(int i=0;i<mInputNodeCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[0];j++)
mWeight[0][i][j]=static_cast<double>(rand())/RAND_MAX;
}
for(int k=1;k<mHiddenLayerCnt;k++)
{
for(int i=0;i<mHiddenNodeCnt[k-1];i++)
{
for(int j=0;j< mHiddenNodeCnt[k];j++)
mWeight[k][i][j]=static_cast<double>(rand())/RAND_MAX;
}
}
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
for(int j=0;j<mOutputNodeCnt;j++)
mWeight[mHiddenLayerCnt][i][j]=static_cast<double>(rand())/RAND_MAX;
}
for(int i=0;i<mHiddenLayerCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
mBias[i][j]=static_cast<double>(rand())/RAND_MAX;
}
}
for(int j=0;j<mOutputNodeCnt;j++)
{
mBias[mHiddenLayerCnt][j]=static_cast<double>(rand())/RAND_MAX;
}
}
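//Note: randWeight() draws every weight and bias uniformly from [0,1] via rand()/RAND_MAX,
//so the initial values are all non-negative. A symmetric range such as [-1,1]
//(e.g. 2.0*rand()/RAND_MAX-1.0) is a common alternative, but that is only a possible
//variation, not what this implementation does.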
void BPNetwork::exception(std::string msg)
{
// qDebug()<<QString::fromStdString("Exception:"+msg);
throw "Exception:"+msg;
}
void BPNetwork::test(double *input)
{
//reset
for(int i=0;i<mHiddenLayerCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
mHiddenIn[i][j]=0;
}
}
for(int i=0;i<mOutputNodeCnt;i++)
{
mOutputIn[i]=0;
}
//reset end
for(int i=0;i<mHiddenNodeCnt[0];i++)
{
for(int j=0;j<mInputNodeCnt;j++)
{
mHiddenIn[0][i]+=mWeight[0][j][i]*input[j];
}
mHiddenIn[0][i]+=mBias[0][i];
}
for(int m=0;m<mHiddenLayerCnt-1;m++)
{
for(int i=0;i<mHiddenNodeCnt[m];i++)
{
mHiddenOut[m][i]=activeFunc(mHiddenIn[m][i],0);
}
for(int i=0;i<mHiddenNodeCnt[m+1];i++)
{
for(int j=0;j<mHiddenNodeCnt[m];j++)
{
mHiddenIn[m+1][i]+=mWeight[m+1][j][i]*mHiddenOut[m][j];
}
mHiddenIn[m+1][i]+=mBias[m+1][i];
}
}
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
mHiddenOut[mHiddenLayerCnt-1][i]=activeFunc(mHiddenIn[mHiddenLayerCnt-1][i],0);
}
for(int i=0;i<mOutputNodeCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[mHiddenLayerCnt-1];j++)
{
mOutputIn[i]+=mWeight[mHiddenLayerCnt][j][i]*mHiddenOut[mHiddenLayerCnt-1][j];
}
mOutputIn[i]+=mBias[mHiddenLayerCnt][i];
}
for(int i=0;i<mOutputNodeCnt;i++)
{
if(mType==TYPE::REGRESSION)
mTestOutput[i]=activeFunc(mOutputIn[i],0);
else if(mType==TYPE::NORMAL)
mTestOutput[i]=mOutputIn[i];
}
}
int BPNetwork::getType() const
{
return mType;
}
void BPNetwork::setType(TYPE type)
{
mType = type;
}
double BPNetwork::getError()
{
double temp=0;
for(int j=0;j<mOutputNodeCnt;j++)
{
temp+=mErr[j]*mErr[j]/2;
}
return temp/mOutputNodeCnt;
}
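//getError() returns the mean half-squared error over the output nodes for the sample
//processed most recently by fp()/calcErr():
//    E = (1/N) * sum_j (err_j^2 / 2),  N = mOutputNodeCnt,  err_j = actual_j - target_j.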
void BPNetwork::fp()
{
//reset
for(int i=0;i<mHiddenLayerCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
mHiddenIn[i][j]=0;
}
}
for(int i=0;i<mOutputNodeCnt;i++)
{
mOutputIn[i]=0;
}
//reset end
for(int i=0;i<mHiddenNodeCnt[0];i++)
{
for(int j=0;j<mInputNodeCnt;j++)
{
mHiddenIn[0][i]+=mWeight[0][j][i]*mInputTmp[j];
}
mHiddenIn[0][i]+=mBias[0][i];
}
for(int m=0;m<mHiddenLayerCnt-1;m++)
{
for(int i=0;i<mHiddenNodeCnt[m];i++)
{
mHiddenOut[m][i]=activeFunc(mHiddenIn[m][i],0);
}
for(int i=0;i<mHiddenNodeCnt[m+1];i++)
{
for(int j=0;j<mHiddenNodeCnt[m];j++)
{
mHiddenIn[m+1][i]+=mWeight[m+1][j][i]*mHiddenOut[m][j];
}
mHiddenIn[m+1][i]+=mBias[m+1][i];
}
}
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
mHiddenOut[mHiddenLayerCnt-1][i]=activeFunc(mHiddenIn[mHiddenLayerCnt-1][i],0);
}
for(int i=0;i<mOutputNodeCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[mHiddenLayerCnt-1];j++)
{
mOutputIn[i]+=mWeight[mHiddenLayerCnt][j][i]*mHiddenOut[mHiddenLayerCnt-1][j];
}
mOutputIn[i]+=mBias[mHiddenLayerCnt][i];
}
for(int i=0;i<mOutputNodeCnt;i++)
{
if(mType==TYPE::REGRESSION)
mActualOutput[i]=activeFunc(mOutputIn[i],0);
else if(mType==TYPE::NORMAL)
mActualOutput[i]=mOutputIn[i];
}
}
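//Forward-pass summary (what test() and fp() compute, with weights indexed as
//mWeight[layer][from][to]): for the first hidden layer
//    in[0][i] = sum_j w[0][j][i] * x[j] + b[0][i],          out[0][i] = s(in[0][i]),
//and for every following hidden layer m+1
//    in[m+1][i] = sum_j w[m+1][j][i] * out[m][j] + b[m+1][i],  out[m+1][i] = s(in[m+1][i]).
//The output layer uses the same weighted sum plus bias; the result is passed through the
//sigmoid for TYPE::REGRESSION and left linear for TYPE::NORMAL.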
void BPNetwork::bp()
{
//initialize: clear the accumulated hidden-layer deltas
for(int i=0;i<mHiddenLayerCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
mHiddenTmp[i][j]=0;
}
}
//compute the weight updates from the output layer back to the last hidden layer
double o=0;
for(int i=0;i<mOutputNodeCnt;i++)
{
if(mType==TYPE::REGRESSION)
o=activeFunc(mActualOutput[i],1)*mErr[i];
else if(mType==TYPE::NORMAL)
o=mErr[i];
for(int j=0;j<mHiddenNodeCnt[mHiddenLayerCnt-1];j++)
{
mHiddenTmp[mHiddenLayerCnt-1][j]+=o*mWeight[mHiddenLayerCnt][j][i];
mWeight[mHiddenLayerCnt][j][i]-=mLearnRate*o*mHiddenOut[mHiddenLayerCnt-1][j];
}
mBias[mHiddenLayerCnt][i]-=mLearnRate*o;
}
//compute the weight updates from the last hidden layer back to the first hidden layer
for(int i=mHiddenLayerCnt-1;i>0;i--)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
o=activeFunc(mHiddenOut[i][j],1)*mHiddenTmp[i][j];
for(int l=0;l<mHiddenNodeCnt[i-1];l++)
{
mHiddenTmp[i-1][l]+=o*mWeight[i][l][j];
mWeight[i][l][j]-=mLearnRate*o*mHiddenOut[i-1][l];
}
mBias[i][j]-=mLearnRate*o;
}
}
//compute the weight updates from the first hidden layer back to the input layer
for(int i=0;i<mHiddenNodeCnt[0];i++)
{
o=activeFunc(mHiddenOut[0][i],1)*mHiddenTmp[0][i];
for(int j=0;j<mInputNodeCnt;j++)
{
mWeight[0][j][i]-=mLearnRate*o*mInputTmp[j];
}
mBias[0][i]-=mLearnRate*o;
}
}
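//Backward-pass summary (what bp() implements): with E = sum_i err_i^2 / 2 and
//err_i = actual_i - target_i, the output-layer delta is
//    o_i = err_i * s'(actual_i)  for TYPE::REGRESSION,    o_i = err_i  for TYPE::NORMAL.
//Each weight is decremented by mLearnRate * delta * (the activation feeding that weight),
//each bias by mLearnRate * delta, and the deltas are accumulated into mHiddenTmp through
//the current weights and propagated layer by layer back to the first hidden layer.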
void BPNetwork::calcErr()
{
for(int i=0;i<mOutputNodeCnt;i++)
{
mErr[i]=mActualOutput[i]-mOutputTmp[i];
}
}
double BPNetwork::train()
{
double err0=0;
mTrainCnt++;
for(int i=0;i<mSampleDim;i++)
{
for(int j=0;j<mInputNodeCnt;j++)
mInputTmp[j]=mInput[j][i];
for(int j=0;j<mOutputNodeCnt;j++)
mOutputTmp[j]=mOutput[j][i];
fp();
calcErr();
bp();
err0+=getError();
}
return err0;
}
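//train() performs one epoch of per-sample (online) gradient descent: for each of the
//mSampleDim samples it copies the i-th entry of every input/output row into mInputTmp
//and mOutputTmp, runs fp(), calcErr() and bp(), and returns the error accumulated over
//the whole epoch, which is the quantity trainByErr() compares against its threshold.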
void BPNetwork::copyWeightAndBias(BPNetwork *other)
{
for(int i=0;i<mInputNodeCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[0];j++)
mWeight[0][i][j]=other->mWeight[0][i][j];
}
for(int k=1;k<mHiddenLayerCnt;k++)
{
for(int i=0;i<mHiddenNodeCnt[k-1];i++)
{
for(int j=0;j< mHiddenNodeCnt[k];j++)
mWeight[k][i][j]=other->mWeight[k][i][j];
}
}
for(int i=0;i<mHiddenNodeCnt[mHiddenLayerCnt-1];i++)
{
for(int j=0;j<mOutputNodeCnt;j++)
mWeight[mHiddenLayerCnt][i][j]=other-> mWeight[mHiddenLayerCnt][i][j];
}
for(int i=0;i<mHiddenLayerCnt;i++)
{
for(int j=0;j<mHiddenNodeCnt[i];j++)
{
mBias[i][j]=other-> mBias[i][j];
}
}
for(int j=0;j<mOutputNodeCnt;j++)
{
mBias[mHiddenLayerCnt][j]=other-> mBias[mHiddenLayerCnt][j];
}
}
void BPNetwork::copyInit(BPNetwork *other)
{
int t=other->hiddenLayerCnt();
int* d=new int[t];
for(int i=0;i<t;i++)
{
d[i]=other->getHiddenNodeCnt()[i];
}
setHiddenLayer(t,d);
delete[] d;//setHiddenLayer copies the node counts, so the temporary array can be released
}
double *BPNetwork::getTestOutput() const
{
return mTestOutput;
}
double BPNetwork::getTestOutput(int i)
{
return mTestOutput[i];
}
int *BPNetwork::getHiddenNodeCnt() const
{
return mHiddenNodeCnt;
}
bool BPNetwork::checkSample()
{
if(mSampleDim<=0||mInput==nullptr||mOutput==nullptr)
{
exception("Error sample!");
return false;
}
//both checks below must run; chaining them with else-if would leave the output check unreachable
for(int i=0;i<mInputNodeCnt;i++)
if(mInput[i]==nullptr)
{
exception("Error input sample!");
return false;
}
for(int i=0;i<mOutputNodeCnt;i++)
if(mOutput[i]==nullptr)
{
exception("Error output sample!");
return false;
}
return true;
}
void BPNetwork::trainByTime(int time)
{
if(checkSample())
{
mTrainCnt=0;
while(time-->0)
train();
}
}
void BPNetwork::trainByErr(double err)
{
if(checkSample())
{
mTrainCnt=0;
double err0=err+1;
while(err0>=err)
err0=train();
}
}
void BPNetwork::trainByErrM(int m)
{
trainByErr(pow(10,-m));
}
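/*
Minimal end-to-end usage sketch. This is illustrative only: it assumes the constructor,
the setters and a nested TYPE enum declared in bpnetwork.h (the exact enum qualification
may differ there), and it uses made-up sample data.

    #include "bpnetwork.h"

    int main()
    {
        BPNetwork net(1, 1);                      // 1 input node, 1 output node
        net.setType(BPNetwork::TYPE::REGRESSION); // sigmoid output
        net.setHiddenLayer(1, 8);                 // one hidden layer with 8 nodes
        net.setLearnRate(0.5);
        net.randWeight();

        // 4 samples: one heap array per node, one entry per sample.
        // BPNetwork takes ownership and delete[]s these in its destructor.
        double* x = new double[4]{0.0, 0.25, 0.5, 0.75};
        double* y = new double[4]{0.0, 0.5, 1.0, 0.5};
        net.setSampleDim(4);
        net.setInput(0, x);
        net.setOutput(0, y);

        net.trainByTime(10000);                   // 10000 epochs over the sample set

        double query[1] = {0.3};                  // test() does not take ownership
        net.test(query);
        double prediction = net.getTestOutput(0);
        (void)prediction;
        return 0;
    }
*/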