#include "configurationhelper.h"

#include <Eigen/Dense>

// isnan/isinf are not available under the same names on all compilers
#if defined(_MSC_VER)
#define isnan(x) _isnan(x)
#define isinf(x) (!_finite(x))
#else
#define isnan(x) std::isnan(x)
#define isinf(x) std::isinf(x)
#endif

#pragma warning(disable:4996)
    freep = new float[1000];

    selectedp = (float **) malloc(100 * sizeof(float *));

    for (int i = 0; i < MAXN; i++) {
        neuronlesion[i] = false;
        neuronlesionVal[i] = 0.0;
    }

    nextStoredActivation = 0;
    firstStoredActivation = 0;
    if ( netFile != "" && (nSensors+nHiddens+nMotors) > 0 ) {
        Logger::error( "Evonet - The information in netFile will override all other Evonet parameters" );
    }
    if ( netFile.isEmpty() ) {
        nneurons = ninputs + nhiddens + noutputs;
        if (this->nneurons > MAXN) {
            Logger::error( "Evonet - increase MAXN to support more than " + QString::number(MAXN) + " neurons" );
        }

        int inputNeuronType = 0;
        if ( str == QString("no_delta") ) {
            inputNeuronType = 0;
        } else if ( str == QString("with_delta") ) {
            inputNeuronType = 1;
        }

        int hiddenNeuronType = 0;
        if ( str == QString("logistic") ) {
            hiddenNeuronType = 0;
        } else if ( str == QString("logistic+delta") ) {
            hiddenNeuronType = 1;
        } else if ( str == QString("binary") ) {
            hiddenNeuronType = 2;
        } else if ( str == QString("logistic_0.2") ) {
            hiddenNeuronType = 3;
        }

        int outputNeuronType = 0;
        if ( str == QString("no_delta") ) {
            outputNeuronType = 0;
        } else if ( str == QString("with_delta") ) {
            outputNeuronType = 1;
        }

        create_net_block( inputNeuronType, hiddenNeuronType, outputNeuronType, recurrentHiddens, inputOutputConnections, recurrentOutputs, biasOnHidden, biasOnOutput );
    }
    freep = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
        freep[r] = 0.0f;

    // phenotype parameters and mutation rates start at DEFAULT_VALUE
    // ("don't care") until a .phe file provides them
    phep = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
        phep[r] = DEFAULT_VALUE;

    muts = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
        muts[r] = DEFAULT_VALUE;
    if ( !netFile.isEmpty() ) {
        // a .phe file with the same base name as the .net file, if present,
        // provides the phenotype parameters
        QFileInfo fileNet( netFile );
        QString filePhe = fileNet.baseName() + ".phe";
    }

    // default labels, ranges and color of the hidden neurons
    for (int i = 0; i < nhiddens; i++) {
        sprintf(neuronl[ninputs+i], "h%d", i);
        neuronrange[ninputs+i][0] = 0.0;
        neuronrange[ninputs+i][1] = 1.0;
        neurondcolor[ninputs+i] = QColor(125,125,125);
    }
    if ( netFile.isEmpty() ) {
        params.createParameter( prefix, "nSensors", QString::number(ninputs) );
        params.createParameter( prefix, "nHidden", QString::number(nhiddens) );
        params.createParameter( prefix, "nMotors", QString::number(noutputs) );
    }
    params.createParameter( prefix, "weightRange", QString::number(wrange) );
    params.createParameter( prefix, "gainRange", QString::number(grange) );
    params.createParameter( prefix, "biasRange", QString::number(brange) );
    d.describeString( "netFile" ).help( "The .net file defining the architecture to load. WARNING: when this parameter is specified, all other parameters are ignored" );
    d.describeReal( "weightRange" ).def(5.0f).limits(1,+Infinity).help( "The synaptic weights of the neural network can only assume values in [-weightRange, +weightRange]" );
    d.describeEnum( "inputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of input neurons when the network is auto generated" );
    d.describeEnum( "hiddenNeuronType" ).def("logistic").values( QStringList() << "logistic" << "logistic+delta" << "binary" << "logistic_0.2" ).help( "The type of hidden neurons when the network is auto generated" );
    d.describeEnum( "outputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of output neurons when the network is auto generated" );
    d.describeBool( "recurrentHiddens" ).def(false).help( "When true, generates a network with recurrent hidden neurons" );
    d.describeBool( "inputOutputConnections" ).def(false).help( "When true, generates a network with input-output connections in addition to input-hidden-output connections" );
    d.describeBool( "recurrentOutputs" ).def(false).help( "When true, generates a network with recurrent output neurons" );
    d.describeBool( "biasOnHiddenNeurons" ).def(true).help( "When true, generates a network whose hidden neurons have a bias" );
    d.describeBool( "biasOnOutputNeurons" ).def(true).help( "When true, generates a network whose output neurons have a bias" );
void Evonet::create_net_block( int inputNeuronType, int hiddenNeuronType, int outputNeuronType, bool recurrentHiddens, bool inputOutputConnections, bool recurrentOutputs, bool biasOnHidden, bool biasOnOutput )
{
    int i;

    // set the type of input, hidden and output neurons and enable the bias
    // where requested
    for (i = 0; i < this->ninputs; i++) {
        this->neurontype[i] = inputNeuronType;
    }
    for (i = this->ninputs; i < (this->nneurons - this->noutputs); i++) {
        this->neurontype[i] = hiddenNeuronType;
        neuronbias[i] = (biasOnHidden) ? 1 : 0;
    }
    for (i = (this->nneurons - this->noutputs); i < this->nneurons; i++) {
        this->neurontype[i] = outputNeuronType;
        neuronbias[i] = (biasOnOutput) ? 1 : 0;
    }

    // gains are disabled on all neurons
    for (i = 0; i < this->nneurons; i++) {
        this->neurongain[i] = 0;
    }

    this->net_nblocks = 0;
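    //
    // Each row of net_block describes one block with six integers (layout
    // inferred from how the rows are written and read elsewhere in this
    // file):
    //   [0] block type: 0 = connections, 1 = update, 2 = gain,
    //       3 = modulated gain
    //   [1] first receiving neuron    [2] number of receiving neurons
    //   [3] first sending neuron      [4] number of sending neurons
    //   [5] 1 if the block is trainable by backpropagation, 0 otherwise
    //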
    // update block for the input neurons
    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = 0;
    this->net_block[this->net_nblocks][2] = this->ninputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_block[this->net_nblocks][5] = 0;
    this->net_nblocks++;

    // input-hidden connections
    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // recurrent hidden-hidden connections
    if (recurrentHiddens) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = this->ninputs;
        this->net_block[this->net_nblocks][4] = this->nhiddens;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // update block for the hidden neurons
    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 1;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = 0;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // direct input-output connections (always present when there are no
    // hidden neurons)
    if (this->nhiddens == 0 || inputOutputConnections) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // hidden-output connections
    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = this->ninputs;
        this->net_block[this->net_nblocks][4] = this->nhiddens;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // recurrent output-output connections
    if (recurrentOutputs) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][4] = this->noutputs;
        this->net_block[this->net_nblocks][5] = 0;
        this->net_nblocks++;
    }

    // update block for the output neurons
    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
    this->net_block[this->net_nblocks][2] = this->noutputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_block[this->net_nblocks][5] = 0;
    this->net_nblocks++;
    // compute the display position of each neuron: inputs on the bottom
    // row, hiddens in the middle, outputs on top
    int n = 0;
    int startx;
    int dx = 50;    // horizontal spacing between neurons (value assumed)

    if (this->ninputs > this->noutputs) {
        startx = 50;
    } else {
        startx = ((this->noutputs - this->ninputs) / 2) * dx + 50;
    }
    for (i = 0; i < this->ninputs; i++, n++) {
        this->neuronxy[n][0] = (i * dx) + startx;
        this->neuronxy[n][1] = 400;
    }

    startx = this->ninputs * dx;
    for (i = 0; i < (this->nneurons - (this->ninputs + this->noutputs)); i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 225;
    }

    if (this->ninputs > this->noutputs) {
        startx = ((this->ninputs - this->noutputs) / 2) * dx + 50;
    } else {
        startx = 50;
    }
    for (i = 0; i < this->noutputs; i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 50;
    }

    // display all neurons and compute the extent of the drawing
    for (i = 0; i < this->nneurons; i++) {
        this->neurondisplay[i] = 1;
    }

    drawnymax = 400 + 30;
    for (i = 0, drawnxmax = 0; i < nneurons; i++) {
        if (neuronxy[i][0] > drawnxmax) {
            drawnxmax = neuronxy[i][0];
        }
    }
}
void Evonet::load_net_blocks(const char *filename, int mode)
{
    FILE *fp;
    int b, n, i;
    int np;
    float *ph = phep;
    float *mu = muts;

    const int bufferSize = 128;
    char cbuffer[bufferSize];

    if ((fp = fopen(filename, "r")) != NULL) {
        fscanf(fp, "ARCHITECTURE\n");
        fscanf(fp, "nneurons %d\n", &nneurons);
        fscanf(fp, "nsensors %d\n", &ninputs);
        fscanf(fp, "nmotors %d\n", &noutputs);
        if (nneurons > MAXN) {
            Logger::error( "Evonet - increase MAXN to support more than " + QString::number(MAXN) + " neurons" );
        }
        nhiddens = nneurons - (ninputs + noutputs);
        fscanf(fp, "nblocks %d\n", &net_nblocks);
        for (b = 0; b < net_nblocks; b++) {
            fscanf(fp, "%d %d %d %d %d %d", &net_block[b][0], &net_block[b][1], &net_block[b][2], &net_block[b][3], &net_block[b][4], &net_block[b][5]);
            // consume the trailing comment that documents the block type
            if (net_block[b][0] == 0)
                fscanf(fp, " // connections block\n");
            if (net_block[b][0] == 1)
                fscanf(fp, " // block to be updated\n");
            if (net_block[b][0] == 2)
                fscanf(fp, " // gain block\n");
            if (net_block[b][0] == 3)
                fscanf(fp, " // modulated gain block\n");
        }

        fscanf(fp, "neurons bias, delta, gain, xy position, display\n");
        for (n = 0; n < nneurons; n++) {
            fscanf(fp, "%d %d %d %d %d %d\n", &neuronbias[n], &neurontype[n], &neurongain[n], &neuronxy[n][0], &neuronxy[n][1], &neurondisplay[n]);
            // keep track of the extent of the drawn network
            if (drawnxmax < neuronxy[n][0])
                drawnxmax = neuronxy[n][0];
            if (drawnymax < neuronxy[n][1])
                drawnymax = neuronxy[n][1];
        }

        fscanf(fp, "FREE PARAMETERS %d\n", &np);
        if (nparameters != np) {
            Logger::error(QString("ERROR: the network defines %1 parameters while %2 contains %3 parameters").arg(nparameters).arg(filename).arg(np));
        }

        i = 0;
        while (fgets(cbuffer, bufferSize, fp) != NULL && i < np) {
            QString line = cbuffer;
            QStringList lineContent = line.split(QRegExp("\\s+"), QString::SkipEmptyParts);

            bool floatOnSecondPlace = false;
            lineContent[1].toFloat(&floatOnSecondPlace);

            // new-format .phe lines carry a mutation rate (or "*") in the
            // second column, old-format lines do not
            if (lineContent.contains("*") || floatOnSecondPlace)
                readNewPheLine(lineContent, ph, mu);
            else
                readOldPheLine(lineContent, ph, mu);
            ph++;
            mu++;
            i++;
        }
        fclose(fp);
        Logger::info( "Evonet - loaded file " + QString(filename) );
    } else {
        Logger::warning( "Evonet - File " + QString(filename) + " not found" );
    }
}
void Evonet::readOldPheLine(QStringList line, float* par, float* mut)
{
    *par = line[0].toFloat();
}

void Evonet::readNewPheLine(QStringList line, float* par, float* mut)
{
    // "*" in either column leaves the current value untouched (semantics
    // inferred from the default_string used when saving)
    if (line[0] != "*") {
        *par = line[0].toFloat();
    }
    if (line[1] != "*") {
        *mut = line[1].toFloat();
    }
}
void Evonet::save_net_blocks(const char *filename, int mode)
{
    FILE *fp;
    int b, n, i, t;

    // render each free parameter (and its mutation rate) as a string;
    // parameters still at DEFAULT_VALUE are saved as "*"
    char* default_string = "*\t\t";
    char **p = new char*[freeParameters()];
    char **mu = new char*[freeParameters()];
    for (int h = 0; h < freeParameters(); h++) {
        mu[h] = new char[50];
        p[h] = new char[50];
        if (muts[h] == DEFAULT_VALUE) {
            mu[h] = default_string;
        } else {
            sprintf(mu[h], "%f", muts[h]);
        }
        if (freep[h] == DEFAULT_VALUE) {
            p[h] = default_string;
        } else {
            sprintf(p[h], "%f", freep[h]);
        }
    }
    if ((fp = fopen(filename, "w")) != NULL) {
        fprintf(fp, "ARCHITECTURE\n");
        fprintf(fp, "nneurons %d\n", nneurons);
        fprintf(fp, "nsensors %d\n", ninputs);
        fprintf(fp, "nmotors %d\n", noutputs);
        fprintf(fp, "nblocks %d\n", net_nblocks);
        for (b = 0; b < net_nblocks; b++) {
            fprintf(fp, "%d %d %d %d %d %d", net_block[b][0], net_block[b][1], net_block[b][2], net_block[b][3], net_block[b][4], net_block[b][5]);
            if (net_block[b][0] == 0) {
                fprintf(fp, " // connections block\n");
            } else if (net_block[b][0] == 1) {
                fprintf(fp, " // block to be updated\n");
            } else if (net_block[b][0] == 2) {
                fprintf(fp, " // gain block\n");
            } else if (net_block[b][0] == 3) {
                fprintf(fp, " // modulated gain block\n");
            }
        }
        fprintf(fp, "neurons bias, delta, gain, xy position, display\n");
        for (n = 0; n < nneurons; n++) {
            fprintf(fp, "%d %d %d %d %d %d\n", neuronbias[n], neurontype[n], neurongain[n], neuronxy[n][0], neuronxy[n][1], neurondisplay[n]);
        }

        // free parameters are written in the order in which they are
        // allocated: gains, then biases, then weights and time constants
        // block by block
        fprintf(fp, "FREE PARAMETERS %d\n", nparameters);
        for (i = 0; i < nneurons; i++) {
            if (neurongain[i] == 1) {
                fprintf(fp, "%s \t %s \tgain %s\n", *p, *mu, neuronl[i]);
                p++;
                mu++;
            }
        }
        for (i = 0; i < nneurons; i++) {
            if (neuronbias[i] == 1) {
                fprintf(fp, "%s \t %s \tbias %s\n", *p, *mu, neuronl[i]);
                p++;
                mu++;
            }
        }
        for (b = 0; b < net_nblocks; b++) {
            if (net_block[b][0] == 0) {
                for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                    for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                        fprintf(fp, "%s \t %s \tweight %s from %s\n", *p, *mu, neuronl[t], neuronl[i]);
                        p++;
                        mu++;
                    }
                }
            } else if (net_block[b][0] == 1) {
                for (t = net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
                    if (neurontype[t] == 1) {
                        float timeC = 0;
                        if (*p != default_string) {
                            timeC = atof(*p);
                            timeC = fabs(timeC)/wrange;
                        }
                        fprintf(fp, "%s \t %s \ttimeconstant %s (%f)\n", *p, *mu, neuronl[t], timeC);
                        p++;
                        mu++;
                    }
                }
            }
        }
        fclose(fp);
        Logger::info( "Evonet - controller saved on file " + QString(filename) );
    } else {
        Logger::error( "Evonet - unable to create the file " + QString(filename) );
    }
}
float Evonet::logistic(float f)
{
    return (float) (1.0 / (1.0 + exp(0.0 - f)));
}

float Evonet::tansig(float f)
{
    return 2.0/(1.0+exp(-2.0*f))-1.0;
}
void Evonet::computeParameters()
{
    int i, t, b;
    int updated[MAXN];
    int ng = 0;
    int nwarnings = 0;

    for (i = 0; i < nneurons; i++) {
        updated[i] = 0;
    }
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) ng++;   // one gain per gain-enabled neuron
    }
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) ng++;   // one bias per bias-enabled neuron
    }
    for (i = 0; i < nneurons; i++) {
        if (neurontype[i] == 1) ng++;   // one time constant per delta neuron
    }
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {     // one weight per connection
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    ng++;
                }
            }
        }
        if (net_block[b][0] == 1) {     // update blocks mark neurons as updated
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                updated[t]++;
            }
        }
    }
    nparameters = ng;

    // sanity checks on the architecture
    for (i = 0; i < nneurons; i++) {
        if (updated[i] < 1 && nwarnings == 0) {
            Logger::warning( "Evonet - neuron " + QString::number(i) + " will never be activated according to the current architecture" );
            nwarnings++;
        }
        if (updated[i] > 1 && nwarnings == 0) {
            Logger::warning( "Evonet - neuron " + QString::number(i) + " will be activated more than once according to the current architecture" );
            nwarnings++;
        }
    }
}
void Evonet::updateNet()
{
    int i, t, b;
    float *p = freep;
    float delta;
    float netinput[MAXN];
    float gain[MAXN];

    // gains
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) {
            gain[i] = (float) (fabs((double) *p) / wrange) * grange;
            p++;
        }
    }
    // biases
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) {
            netinput[i] = ((double)*p/wrange)*brange;
            p++;
        } else {
            netinput[i] = 0.0f;
        }
    }
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {      // connection block
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    netinput[t] += act[i] * gain[i] * *p;
                    p++;
                }
            }
        }
        if (net_block[b][0] == 2) {      // gain block
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                gain[t] = gain[net_block[b][1]];
            }
        }
        if (net_block[b][0] == 3) {      // modulated gain block
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                gain[t] = act[net_block[b][3]];
            }
        }
        if (net_block[b][0] == 1) {      // update block
            for (t = net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
                if (t < ninputs) {
                    switch (neurontype[t]) {
                        case 0:          // simple input neuron
                            act[t] = input[t];
                            break;
                        case 1:          // delta input neuron
                            delta = (float) (fabs((double) *p) / wrange);
                            p++;
                            act[t] = (act[t] * delta) + (input[t] * (1.0f - delta));
                            break;
                    }
                    if (neuronlesions > 0 && neuronlesion[t]) {
                        act[t] = (float)neuronlesionVal[t];
                    }
                } else {
                    switch (neurontype[t]) {
                        case 0:          // logistic
                            act[t] = logistic(netinput[t]);
                            break;
                        case 1:          // delta (leaky) logistic
                            delta = (float) (fabs((double) *p) / wrange);
                            p++;
                            act[t] = (act[t] * delta) + (logistic(netinput[t]) * (1.0f - delta));
                            break;
                        case 2:          // binary threshold
                            if (netinput[t] >= 0.0) {
                                act[t] = 1.0;
                            } else {
                                act[t] = 0.0;
                            }
                            break;
                        case 3:          // logistic with reduced slope
                            act[t] = logistic(netinput[t]*0.2f);
                            break;
                    }
                    if (neuronlesions > 0 && neuronlesion[t]) {
                        act[t] = (float)neuronlesionVal[t];
                    }
                }
            }
        }
    }

    // store the activations in a circular buffer
    memcpy(storedActivations[nextStoredActivation], act, nneurons * sizeof(float));
    nextStoredActivation = (nextStoredActivation + 1) % MAX_STORED_ACTIVATIONS;
    if (firstStoredActivation == nextStoredActivation) {
        // the buffer is full: forget the oldest stored activation
        firstStoredActivation = (firstStoredActivation + 1) % MAX_STORED_ACTIVATIONS;
    }

    updatescounter++;
}
int Evonet::setInput(int inp, float value)
{
    if (inp >= ninputs || inp < 0) {
        return -1;  // index out of range
    }
    input[inp] = value;
    return 0;
}

float Evonet::getOutput(int out)
{
    if (out >= noutputs) {
        return -1.0;
    }
    return act[ninputs+nhiddens+out];
}

float Evonet::getInput(int in)
{
    return this->input[in];
}

float Evonet::getNeuron(int in)
{
    return act[in];
}

void Evonet::resetNet()
{
    for (int i = 0; i < MAXN; i++) {
        act[i] = 0.0;
        input[i] = 0.0;
    }
    updatescounter = 0;
}

void Evonet::injectHidden(int nh, float val)
{
    if (nh < nhiddens) {
        act[this->ninputs+nh] = val;
    }
}

float Evonet::getHidden(int h)
{
    if (h < nhiddens && h >= 0) {
        return act[this->ninputs+h];
    } else {
        return -1.0;  // index out of range
    }
}

int Evonet::freeParameters()
{
    return this->nparameters;
}

float Evonet::getFreeParameter(int i)
{
    return freep[i];
}

bool Evonet::pheFileLoaded()
{
    return pheloaded;
}
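/*
 * Genotype-phenotype mapping: an integer gene in [0, geneMaxValue] is
 * mapped linearly onto the weight range, gene 0 giving +wrange and
 * geneMaxValue giving -wrange:
 *
 *   p = wrange - (gene / geneMaxValue) * 2 * wrange
 *
 * copyPheParameters() below applies the inverse mapping.
 */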
void Evonet::setParameters(const int *dt)
{
    int i;
    float *p = freep;

    for (i = 0; i < freeParameters(); i++, p++) {
        *p = wrange - ((float)dt[i]/geneMaxValue)*wrange*2;
    }
}

void Evonet::getMutations(float* GAmut)
{
    // return the mutation rate read from the .phe file for each parameter
    for (int i = 0; i < freeParameters(); i++) {
        GAmut[i] = muts[i];
    }
}

void Evonet::copyPheParameters(int* pheGene)
{
    for (int i = 0; i < freeParameters(); i++) {
        if (phep[i] == DEFAULT_VALUE) {
            pheGene[i] = DEFAULT_VALUE;
        } else {
            pheGene[i] = (int)((wrange - phep[i])*geneMaxValue/(2*wrange));
        }
    }
}
void Evonet::printIO()
{
    QString output;

    for (int in = 0; in < this->ninputs; in++) {
        output += QString("%1 ").arg(this->input[in], 0, 'f', 10);
    }
    for (int hi = this->ninputs; hi < (this->nneurons - this->noutputs); hi++) {
        output += QString("%1 ").arg(this->act[hi], 0, 'f', 10);
    }
    for (int out = 0; out < this->noutputs; out++) {
        output += QString("%1 ").arg(this->act[this->ninputs+this->nhiddens+out], 0, 'f', 10);
    }

    Logger::info(output);
}
int Evonet::getParamBias(int nbias)
{
    int pb = -1;
    if (nbias < nparambias && nbias > -1) {
        pb = (int) freep[nparambias+nbias];
    }
    return pb;
}

float Evonet::getWrange()
{
    return wrange;
}

void Evonet::printBlocks()
{
    Logger::info("Evonet - ninputs " + QString::number(this->ninputs));
    Logger::info("Evonet - nhiddens " + QString::number(this->nhiddens));
    Logger::info("Evonet - noutputs " + QString::number(this->noutputs));
    Logger::info("Evonet - nneurons " + QString::number(this->nneurons));

    for (int i = 0; i < this->net_nblocks; i++) {
        Logger::info( QString("Evonet Block - %1 | %2 - %3 -> %4 - %5 | %6")
                      .arg(net_block[i][0])
                      .arg(net_block[i][1])
                      .arg(net_block[i][2])
                      .arg(net_block[i][3])
                      .arg(net_block[i][4])
                      .arg(net_block[i][5]));
    }
}

int Evonet::getNoInputs()
{
    return ninputs;
}

int Evonet::getNoHiddens()
{
    return nhiddens;
}

int Evonet::getNoOutputs()
{
    return noutputs;
}

int Evonet::getNoNeurons()
{
    return nneurons;
}

void Evonet::setRanges(double weight, double bias, double gain)
{
    wrange = weight;
    brange = bias;
    grange = gain;
}
float* Evonet::getOldestStoredActivation()
{
    // return NULL when the circular buffer is empty
    if (firstStoredActivation == nextStoredActivation) {
        return NULL;
    }

    const int ret = firstStoredActivation;
    firstStoredActivation = (firstStoredActivation + 1) % MAX_STORED_ACTIVATIONS;
    return storedActivations[ret];
}

int Evonet::updateCounts()
{
    return updatescounter;
}
void Evonet::printWeights()
{
    for (int i = 0; i < freeParameters(); i++) {
        printf("%f\n", freep[i]);
    }
}

void Evonet::initWeightsInRange(float min, float max)
{
    float range = max - min;
    for (int i = 0; i < freeParameters(); i++) {
        freep[i] = (((float) rand())/RAND_MAX)*range + min;
    }
}
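/*
 * Nguyen-Widrow initialization: after a uniform random initialization,
 * the hidden-layer weight vectors (biases included) are rescaled so that
 * their norm equals beta = 0.7 * H^(1/N), with H hidden units and N
 * inputs. This spreads the active regions of the sigmoids evenly over
 * the input space and typically speeds up training.
 */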
void Evonet::initWeightsNguyenWidrow(float min, float max)
{
    initWeightsInRange(min, max);

    double beta = 0.7 * pow(nhiddens, 1.0/ninputs);
    double norm = 0;
    double tmp;

    // compute the norm of the hidden-layer weight vector, biases included
    for (int i = 0; i < nhiddens; i++) {
        for (int j = 0; j < ninputs; j++) {
            tmp = getWeight(i+ninputs, j);
            norm += tmp*tmp;
        }
        if (neuronbias[i+ninputs]) {
            // locate the bias of hidden neuron i by counting the
            // bias-enabled neurons that precede it
            int ptr = 0;
            for (int j = 0; j < i+ninputs; j++) {
                ptr += neuronbias[j]==1;
            }
            norm += freep[ptr]*freep[ptr];
        }
    }
    norm = sqrt(norm);

    double k = beta/norm;
    // rescale weights and biases so that the norm equals beta
    for (int i = 0; i < nhiddens; i++) {
        for (int j = 0; j < ninputs; j++) {
            setWeight(i+ninputs, j, getWeight(i+ninputs, j)*k);
        }
        if (neuronbias[i+ninputs]) {
            int ptr = 0;
            for (int j = 0; j < i+ninputs; j++) {
                ptr += neuronbias[j]==1;
            }
            freep[ptr] *= k;
        }
    }
}
void Evonet::hardwire()
{
    for (int i = 0; i < nneurons; i++) {
        if (neuronbias[i]) {
            // ...
        }
    }

    // only connection blocks flagged as trainable (net_block[b][5]==1)
    // keep their parameters free
    for (int b = 0; b < net_nblocks; b++) {
        for (int i = 0; i < net_block[b][2]*net_block[b][4]; i++) {
            if (net_block[b][0] == 0 && net_block[b][5] == 1) {
                // ...
            } else if (net_block[b][0] == 0 && net_block[b][5] == 0) {
                // ...
            }
        }
    }

    for (int i = 0; i < nneurons; i++) {
        // ...
    }
}

// debug printing for the backpropagation code (BACKPROP_DEBUG is an
// assumed guard name)
#ifdef BACKPROP_DEBUG
#define bpdebug(x,...) printf(x,##__VA_ARGS__)
#define debug(x,...) printf(x,##__VA_ARGS__)
#else
#define bpdebug(x,...)
#define debug(x,...)
#endif
// true when (x) lies in [y, y+z)
#define inRange(x,y,z) ( (x) >= (y) && (x) < (y)+(z) )

int Evonet::isHidden(int neuron)
{
    return neuron >= ninputs && neuron < ninputs+nhiddens;
}

void Evonet::printAct()
{
    for (int i = 0; i < nneurons; i++) {
        printf("act[%d]: %f\n", i, act[i]);
    }
}
float Evonet::computeMeanSquaredError(QVector<float> trainingSet, QVector<float> desiredOutput)
{
    float err = 0;
    float tmp;
    int ptr = 0;

    int size = trainingSet.size()/ninputs;

    // start from a clean internal state
    for (int i = 0; i < nneurons; i++) {
        act[i] = 0.0;
    }

    for (int i = 0; i < size; i++) {
        // present the pattern and update the network
        for (int j = 0; j < ninputs; j++) {
            setInput(j, trainingSet[i*ninputs + j]);
        }
        updateNet();

        // accumulate the weighted squared error on the trained outputs
        for (int j = 0; j < noutputs; j++) {
            if (!outputsToTrain[j])
                continue;
            tmp = desiredOutput[ptr++] - act[j+ninputs+nhiddens];
            err += tmp*tmp*err_weights[j]*err_weights[j];
        }
    }

    return err / (err_weight_sum*size);
}
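/*
 * The two routines below shuttle the trainable parameters between freep
 * and an Eigen vector. They rely on the freep layout: the biases of
 * bias-enabled neurons come first (located by counting the bias-enabled
 * neurons that precede a given neuron), followed by the connection
 * weights of each block in declaration order. Gain parameters are not
 * handled, so the training code assumes architectures without gain
 * blocks.
 */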
int Evonet::extractWeightsFromNet(Eigen::VectorXf& w)
{
    int wPtr = 0, paramPtr = 0;

    // the connection weights in freep start right after the biases
    int nbiases = 0;
    for (int i = 0; i < nneurons; i++) {
        nbiases += (neuronbias[i]==1);
    }
    paramPtr = nbiases;

    // trainable blocks feeding the hidden layer: copy the bias of each
    // receiving neuron, then its incoming weights
    for (int b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] != 0)
            continue;
        if (net_block[b][5]==1 && !(net_block[b][1]>=ninputs+nhiddens)) {
            for (int i = net_block[b][1]; i < net_block[b][1]+net_block[b][2]; i++) {
                if (neuronbias[i]) {
                    // locate the bias of neuron i by counting the
                    // bias-enabled neurons that precede it
                    int ptr = 0;
                    for (int j = 0; j < i; j++) {
                        ptr += neuronbias[j]==1;
                    }
                    debug("Adding bias of neuron %d (freep[%d]) in w[%d]\n", i, ptr, wPtr);
                    w[wPtr++] = freep[ptr];
                }
                for (int j = 0; j < net_block[b][4]; j++) {
                    debug("Adding connection %d of neuron %d (freep[%d]) in w[%d]\n", j, i, paramPtr, wPtr);
                    w[wPtr++] = freep[paramPtr++];
                }
            }
        } else {
            // skip the parameters of blocks that are not trained
            paramPtr += net_block[b][2]*net_block[b][4];
        }
    }

    // trainable blocks feeding the outputs
    for (int i = 0; i < noutputs; i++) {
        if (!outputsToTrain[i])
            continue;
        int i_freep = i+ninputs+nhiddens;

        if (neuronbias[i_freep]) {
            int ptr = 0;
            for (int j = 0; j < i_freep; j++) {
                ptr += neuronbias[j]==1;
            }
            debug("Adding bias of output %d (freep[%d]) in w[%d]\n", i, ptr, wPtr);
            w[wPtr++] = freep[ptr];
        }

        for (int b = 0; b < net_nblocks; b++) {
            debug("Accessing trainingHiddenBlock[net_block[%d][3] = %d][%d]\n", b, net_block[b][3], i);
            if (!(trainingHiddenBlock[net_block[b][3]][i] && inRange(net_block[b][1], ninputs+nhiddens, noutputs))) {
                paramPtr += net_block[b][2]*net_block[b][4];
                debug("\tparamPtr: %d\n", paramPtr);
                continue;
            }
            for (int j = 0; j < net_block[b][4]; j++) {
                debug("Adding connection %d of output %d (freep[%d]) in w[%d]\n", j, i_freep, (i_freep-net_block[b][1])*net_block[b][4] + paramPtr, wPtr);
                w[wPtr++] = freep[(i_freep-net_block[b][1])*net_block[b][4] + paramPtr++];
            }
        }
    }

    // refuse weight vectors containing NaN or infinity
    for (int i = 0; i < w.size(); i++) {
        if (isnan(w[i]) || isinf(w[i]))
            return -1;
    }
    return 0;
}
int Evonet::importWeightsFromVector(Eigen::VectorXf& w)
{
    int wPtr = 0, paramPtr = 0;

    int nbiases = 0;
    for (int i = 0; i < nneurons; i++) {
        nbiases += (neuronbias[i]==1);
    }
    paramPtr = nbiases;

    // trainable blocks feeding the hidden layer
    for (int b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] != 0)
            continue;
        if (net_block[b][5]==1 && !(net_block[b][1]>=ninputs+nhiddens)) {
            for (int i = net_block[b][1]; i < net_block[b][1]+net_block[b][2]; i++) {
                if (neuronbias[i]) {
                    int ptr = 0;
                    for (int j = 0; j < i; j++) {
                        ptr += neuronbias[j]==1;
                    }
                    debug("Adding bias of neuron %d (w[%d]) in freep[%d]\n", i, wPtr, ptr);
                    freep[ptr] = w[wPtr++];
                }
                for (int j = 0; j < net_block[b][4]; j++) {
                    debug("Adding connection %d of neuron %d (w[%d]) in freep[%d]\n", j, i, wPtr, paramPtr);
                    freep[paramPtr++] = w[wPtr++];
                }
            }
        } else {
            paramPtr += net_block[b][2]*net_block[b][4];
        }
    }

    // trainable blocks feeding the outputs
    for (int i = 0; i < noutputs; i++) {
        if (!outputsToTrain[i])
            continue;
        int i_freep = i+ninputs+nhiddens;

        if (neuronbias[i_freep]) {
            int ptr = 0;
            for (int j = 0; j < i_freep; j++) {
                ptr += neuronbias[j]==1;
            }
            debug("Adding bias of output %d (w[%d]) in freep[%d]\n", i, wPtr, ptr);
            freep[ptr] = w[wPtr++];
        }

        for (int b = 0; b < net_nblocks; b++) {
            if (!(trainingHiddenBlock[net_block[b][3]][i] && inRange(net_block[b][1], ninputs+nhiddens, noutputs))) {
                paramPtr += net_block[b][2]*net_block[b][4];
                continue;
            }
            for (int j = 0; j < net_block[b][4]; j++) {
                debug("Adding connection %d of output %d (w[%d]) in freep[%d]\n", j, i_freep, wPtr, (i_freep-net_block[b][1])*net_block[b][4] + paramPtr);
                freep[(i_freep-net_block[b][1])*net_block[b][4] + paramPtr++] = w[wPtr++];
            }
        }
    }

    return 0;
}
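/*
 * getWeight/setWeight address one connection weight inside freep: skip
 * the bias parameters, then walk the connection blocks; within the block
 * containing (to, from) the weight lies at the row-major offset
 * (to - first_receiver)*n_senders + (from - first_sender).
 */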
float Evonet::getWeight(int to, int from)
{
    debug("Getting w to %d from %d\n", to, from);

    int ptr = 0;
    for (int i = 0; i < nneurons; i++) {
        ptr += neuronbias[i]==1;
    }
    for (int b = 0; b < net_nblocks; b++) {
        if (inRange(to, net_block[b][1], net_block[b][2]) && inRange(from, net_block[b][3], net_block[b][4])) {
            ptr += (to-net_block[b][1])*net_block[b][4]+(from-net_block[b][3]);
            break;
        } else {
            ptr += net_block[b][2]*net_block[b][4];
        }
    }

    debug("Returning freep[%d]\n", ptr);
    if (ptr >= freeParameters()) {
        return 0;  // no such connection
    }
    return freep[ptr];
}

void Evonet::setWeight(int to, int from, float w)
{
    int ptr = 0;
    for (int i = 0; i < nneurons; i++) {
        ptr += neuronbias[i]==1;
    }
    for (int b = 0; b < net_nblocks; b++) {
        if (inRange(to, net_block[b][1], net_block[b][2]) && inRange(from, net_block[b][3], net_block[b][4])) {
            ptr += (to-net_block[b][1])*net_block[b][4]+(from-net_block[b][3]);
            break;
        } else {
            ptr += net_block[b][2]*net_block[b][4];
        }
    }
    freep[ptr] = w;
}
float Evonet::derivative(int n, float x)
{
    // derivative of the logistic function expressed through its output:
    // f'(net) = f(net)*(1 - f(net)); x is the activation of neuron n
    return x*(1.0f-x);
}
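/*
 * prepareForTraining() builds the bookkeeping used by the
 * Levenberg-Marquardt routines: nconnections counts the trainable
 * parameters (weights plus biases of trainable blocks), outputsToTrain
 * flags the outputs fed by a trainable block, and
 * trainingHiddenBlock[h][o] records whether neuron h feeds output o
 * through a trainable block. err_w supplies one error weight per output:
 * err_weights stores the squared weights while err_weight_sum
 * accumulates the raw ones, both used by computeMeanSquaredError().
 */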
void Evonet::prepareForTraining(QVector<float> &err_w)
{
    nconnections = 0;

    outputsToTrain = (char*)calloc(noutputs, sizeof(char));
    n_outputsToTrain = 0;

    trainingHiddenBlock = (char**)calloc(nneurons, sizeof(char*));
    for (int i = 0; i < nneurons; i++) {
        trainingHiddenBlock[i] = (char*) calloc(noutputs, sizeof(char));
    }

    for (int b = 0; b < net_nblocks; b++) {
        if (net_block[b][0]==0 && net_block[b][5]==1) {
            // the weights of this trainable block...
            nconnections += (net_block[b][2]*net_block[b][4]);
            // ...plus one bias for each bias-enabled receiving neuron
            for (int i = net_block[b][1]; i < net_block[b][1]+net_block[b][2]; i++) {
                nconnections += (neuronbias[i] == 1);
            }

            if (net_block[b][1] >= ninputs+nhiddens) {
                // the block feeds output neurons directly
                memset(outputsToTrain+net_block[b][1]-ninputs-nhiddens, 1, net_block[b][2]*sizeof(char));
                n_outputsToTrain += net_block[b][2];

                for (int j = 0; j < net_block[b][4]; j++)
                    memset(&trainingHiddenBlock[net_block[b][3]+j][net_block[b][1]-ninputs-nhiddens], 1, net_block[b][2]*sizeof(char));
            }
        }
    }

    printf("n_outputToTrain: %d\n", n_outputsToTrain);
    printf("output to train: ");
    for (int i = 0; i < noutputs; i++) {
        printf("%d ", outputsToTrain[i]);
    }
    printf("\n");
    for (int j = 0; j < nneurons; j++) {
        for (int i = 0; i < noutputs; i++) {
            printf("%d ", trainingHiddenBlock[j][i]);
        }
        printf("\n");
    }
    debug("nconnections: %d\n", nconnections);

    err_weight_sum = 0;
    for (int i = 0; i < err_w.size(); i++) {
        err_weights.push_back(err_w[i]*err_w[i]);
        err_weight_sum += err_w[i];
    }
    printf("err_weight_sum : %f\n", err_weight_sum);
}
void Evonet::endTraining()
{
    free(outputsToTrain);
    for (int i = 0; i < nneurons; i++) {
        free(trainingHiddenBlock[i]);
    }
    free(trainingHiddenBlock);
}
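/*
 * Levenberg-Marquardt training. Each cycle linearizes the network around
 * the current weights, fills the error vector e and the Jacobian J of the
 * trained outputs with respect to the trainable parameters, and solves
 *
 *   w_new = w_old - (J'J + lambda*I)^-1 J'e
 *
 * via Eigen's LDLT factorization. When a step increases the error the old
 * weights are restored and lambda is multiplied by 10 (up to six
 * retries), sliding the step from Gauss-Newton towards gradient descent.
 */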
float Evonet::trainLevembergMarquardt(QVector<float> trainingSet, QVector<float> desiredOutput, float maxError)
{
    int i, j, b;
    int cycles, pattern;
    float delta;

    double lambda = 0.001;
    double currentError = 0, previousError = 0;

    if (nconnections == 0) {
        printf("nconnections: 0\nnothing to train\n");
        return 0;
    }

    int nbiases = 0;
    for (i = 0; i < nneurons; i++) {
        nbiases += neuronbias[i]==1;
    }

    int size = trainingSet.size() / ninputs;
    debug("npatterns: %d\n", size);

    int end = 1000;  // maximum number of LM cycles (value assumed)

    Eigen::VectorXf err(size*n_outputsToTrain);
    Eigen::MatrixXf jacobian(size*n_outputsToTrain, nconnections);
    Eigen::MatrixXf jj(nconnections, nconnections);
    Eigen::VectorXf new_weights(nconnections);
    Eigen::VectorXf old_weights(nconnections);
    Eigen::VectorXf ww_err(nconnections);

    previousError = computeMeanSquaredError(trainingSet, desiredOutput);
    printf("Initial error: %f\n", previousError);

    for (cycles = 0; cycles < end; cycles++) {

        // linearization point: keep a copy of the current weights
        extractWeightsFromNet(old_weights);
        debug("weights extracted\n");

        for (pattern = 0; pattern < size; pattern++) {
            debug("\n\n------------\n\n");
            debug("\tpattern: %d\n", pattern);

            // forward pass
            for (i = 0; i < ninputs; i++) {
                setInput(i, trainingSet[pattern*ninputs + i]);
            }
            updateNet();

            // backward computation, output layer: one Jacobian row per
            // trained output
            for (int m = noutputs-1; m >= 0; m--) {
                if (!outputsToTrain[m])
                    continue;

                int m_freep = m+ninputs+nhiddens;
                int col_idx = nconnections - 1;
                int row_idx = n_outputsToTrain*pattern-1;
                for (i = 0; i <= m; i++) {
                    row_idx += outputsToTrain[i];
                }

                err[row_idx] = (desiredOutput[row_idx] - act[m_freep])*err_weights[m];
                delta = -derivative(m_freep, act[m_freep])*err_weights[m];

                // derivatives with respect to the output weights and biases
                for (i = noutputs-1; i >= 0; i--) {
                    if (!outputsToTrain[i])
                        continue;
                    for (b = net_nblocks-1; b >= 0; b--) {
                        if (trainingHiddenBlock[net_block[b][3]][m] && net_block[b][5]==1) {
                            for (j = net_block[b][3]+net_block[b][4]-1; j >= net_block[b][3]; j--) {
                                if (i == m) {
                                    jacobian(row_idx, col_idx--) = delta * act[j];
                                    debug("\t\tcol_idx: %d\n", col_idx+1);
                                    debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx+1, delta, act[j], delta*act[j]);
                                } else {
                                    // weights of other outputs do not affect this row
                                    jacobian(row_idx, col_idx--) = 0;
                                    debug("\t\tcol_idx: %d\n", col_idx+1);
                                    debug("\t\tjacobian(%d,%d) = 0\n", row_idx, col_idx+1);
                                }
                            }
                        }
                    }
                    if (neuronbias[i+ninputs+nhiddens]) {
                        debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta);
                        jacobian(row_idx, col_idx--) = (i==m ? delta : 0);
                    }
                }

                debug("\nBackward computation: hidden layer\n");
                for (b = net_nblocks-1; b >= 0; b--) {
                    debug("\ttrainingHiddenBlock[%d][%d]: %d\n", net_block[b][1], m, trainingHiddenBlock[net_block[b][1]][m]);
                    if (net_block[b][0]!=0 || net_block[b][5]!=1 || !trainingHiddenBlock[net_block[b][1]][m])
                        continue;

                    for (j = net_block[b][1]+net_block[b][2]-1; j >= net_block[b][1]; j--) {
                        // backpropagate through hidden neuron j
                        double delta_h = delta * getWeight(m_freep, j) * derivative(j, act[j]);
                        for (int k = net_block[b][3]+net_block[b][4]-1; k >= net_block[b][3]; k--) {
                            jacobian(row_idx, col_idx--) = delta_h * act[k];
                            debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx+1, delta_h, act[k], delta*act[k]);
                        }
                        if (neuronbias[j]) {
                            debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta_h);
                            jacobian(row_idx, col_idx--) = delta_h;
                        }
                    }
                }
            }
        }
        debug("\tAll rows analyzed\n");

        // keep lambda within sane bounds (reset value assumed)
        if (lambda > 100000000 || lambda < 0.000001) {
            lambda = 0.001;
        }

        ww_err = jacobian.transpose()*err;
        jj = jacobian.transpose()*jacobian;

        for (int retry = 0; retry < 6; retry++, lambda *= 10) {
            debug("\tlambda: %f\n", lambda);

            // LM step: solve (J'J + lambda*I) dw = J'e
            new_weights = old_weights - (jj + lambda*Eigen::MatrixXf::Identity(nconnections,nconnections)).ldlt().solve(ww_err);

            importWeightsFromVector(new_weights);
            currentError = computeMeanSquaredError(trainingSet, desiredOutput);
            printf("iteration: %d err: %f lambda: %f\n", cycles, currentError, lambda);
            debug("currentError: %f\n", currentError);

            if (currentError <= maxError)
                return currentError;
            if ((new_weights-old_weights).norm() < 0.0001) {
                printf("Minimum gradient reached\n");
                return currentError;
            }

            if (currentError > previousError) {
                // failed step: restore the weights and retry with a larger lambda
                importWeightsFromVector(old_weights);
            } else {
                // successful step: accept it and relax lambda
                previousError = currentError;
                lambda /= 10;
                break;
            }
        }
    }

    return currentError;
}
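/*
 * Through-time variant of the routine above for recurrent architectures:
 * the Jacobian entries of weights arriving from hidden neurons use the
 * hidden activations of the previous step (oldActivations), so the
 * gradient is propagated one step back in time rather than through the
 * whole history.
 */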
float Evonet::trainLevembergMarquardtThroughTime(QVector<float> trainingSet, QVector<float> desiredOutput, int time, float maxError)
{
    int i, j, b;
    int cycles, pattern;
    float delta;

    double lambda = 0.001;
    double currentError = 0, previousError = 0;

    if (nconnections == 0) {
        printf("nconnections: 0\nnothing to train\n");
        return 0;
    }

    int nbiases = 0;
    for (i = 0; i < nneurons; i++) {
        nbiases += neuronbias[i]==1;
    }

    int size = trainingSet.size() / ninputs;
    debug("npatterns: %d\n", size);

    int end = 1000;  // maximum number of LM cycles (value assumed)

    // hidden activations of the previous step(s), used for the recurrent
    // part of the Jacobian
    Eigen::VectorXf oldActivations(time*nhiddens);
    oldActivations.setZero();

    Eigen::VectorXf err(size*n_outputsToTrain);
    Eigen::MatrixXf jacobian(size*n_outputsToTrain, nconnections);
    Eigen::MatrixXf jj(nconnections, nconnections);
    Eigen::VectorXf new_weights(nconnections);
    Eigen::VectorXf old_weights(nconnections);
    Eigen::VectorXf ww_err(nconnections);

    previousError = computeMeanSquaredError(trainingSet, desiredOutput);
    printf("Initial error: %f\n", previousError);

    for (cycles = 0; cycles < end; cycles++) {

        extractWeightsFromNet(old_weights);
        debug("weights extracted\n");

        for (pattern = 0; pattern < size; pattern++) {
            debug("\n\n------------\n\n");
            debug("\tpattern: %d\n", pattern);

            for (i = 0; i < ninputs; i++) {
                setInput(i, trainingSet[pattern*ninputs + i]);
            }
            updateNet();

            for (int m = noutputs-1; m >= 0; m--) {
                debug("m: %d\n", m);
                if (!outputsToTrain[m])
                    continue;

                int m_freep = m+ninputs+nhiddens;
                int col_idx = nconnections - 1;
                int row_idx = n_outputsToTrain*pattern-1;
                debug("row_idx: %d\n", row_idx);
                for (i = 0; i <= m; i++) {
                    row_idx += outputsToTrain[i];
                }
                debug("row_idx: %d\n", row_idx);

                err[row_idx] = (desiredOutput[row_idx] - act[m_freep])*err_weights[m];
                delta = -derivative(m_freep, act[m_freep])*err_weights[m];

                for (i = noutputs-1; i >= 0; i--) {
                    debug("\toutput: %d\n", i);
                    if (!outputsToTrain[i])
                        continue;
                    for (b = net_nblocks-1; b >= 0; b--) {
                        if (trainingHiddenBlock[net_block[b][3]][m]) {
                            for (j = net_block[b][3]+net_block[b][4]-1; j >= net_block[b][3]; j--) {
                                if (i == m) {
                                    jacobian(row_idx, col_idx--) = delta * act[j];
                                    debug("\t\tcol_idx: %d\n", col_idx+1);
                                    debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx+1, delta, act[j], delta*act[j]);
                                } else {
                                    jacobian(row_idx, col_idx--) = 0;
                                    debug("\t\tcol_idx: %d\n", col_idx+1);
                                    debug("\t\tjacobian(%d,%d) = 0\n", row_idx, col_idx+1);
                                }
                            }
                        }
                    }
                    if (neuronbias[i+ninputs+nhiddens]) {
                        debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, (i==m ? delta : 0));
                        jacobian(row_idx, col_idx--) = (i==m ? delta : 0);
                    }
                }

                debug("\nBackward computation: hidden layer\n");
                for (b = net_nblocks-1; b >= 0; b--) {
                    debug("\ttrainingHiddenBlock[%d][%d]: %d\n", net_block[b][1], m, trainingHiddenBlock[net_block[b][1]][m]);
                    if (net_block[b][0]!=0 || !trainingHiddenBlock[net_block[b][1]][m])
                        continue;

#warning The trainLevembergMarquardtThroughTime method requires that all the connections to a particular hidden block are in the same net_block.

                    for (j = net_block[b][1]+net_block[b][2]-1; j >= net_block[b][1]; j--) {
                        double delta_h = delta * getWeight(m_freep, j) * derivative(j, act[j]);
                        for (int k = net_block[b][3]+net_block[b][4]-1; k >= net_block[b][3]; k--) {
                            // recurrent inputs use the activation of the previous step
                            jacobian(row_idx, col_idx--) = delta_h * (isHidden(k) ? oldActivations[k-ninputs] : act[k]);
                            debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx+1, delta_h, act[k], delta*act[k]);
                        }
                        if (neuronbias[j]) {
                            debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta_h);
                            jacobian(row_idx, col_idx--) = delta_h;
                        }
                    }
                }
            }

            // remember the hidden activations for the next step
            for (int i = 0; i < nhiddens; i++) {
                oldActivations[i] = act[i+ninputs];
            }
        }
        debug("\tAll rows analyzed\n");

        // keep lambda within sane bounds (reset value assumed)
        if (lambda > 100000000 || lambda < 0.000001) {
            lambda = 0.001;
        }

        ww_err = jacobian.transpose()*err;
        jj = jacobian.transpose()*jacobian;

        for (int retry = 0; retry < 6; retry++, lambda *= 10) {
            debug("\tlambda: %f\n", lambda);

            new_weights = old_weights - (jj + lambda*Eigen::MatrixXf::Identity(nconnections,nconnections)).ldlt().solve(ww_err);

            importWeightsFromVector(new_weights);
            currentError = computeMeanSquaredError(trainingSet, desiredOutput);
            printf("iteration: %d err: %f lambda: %f\n", cycles, currentError, lambda);
            debug("currentError: %f\n", currentError);

            if (currentError <= maxError)
                return currentError;
            if ((new_weights-old_weights).norm() < 0.0001) {
                printf("Minimum gradient reached\n");
                return currentError;
            }

            if (currentError > previousError) {
                importWeightsFromVector(old_weights);
            } else {
                previousError = currentError;
                lambda /= 10;
                break;
            }
        }
    }

    return currentError;
}
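/*
 * Weight exchange with MATLAB: for every trainable block the weight
 * matrix is stored one value per line, column by column (note the
 * transposed indexing freep[wptr + i + j*net_block[b][4]]), followed by
 * the biases of the receiving neurons.
 */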
int Evonet::importWeightsFromMATLABFile(char *path)
{
    FILE *fp = fopen(path, "r");
    if (!fp) {
        return -1;
    }

    int b;
    int wptr = 0;
    for (int i = 0; i < nneurons; i++) {
        wptr += (neuronbias[i]==1);
    }

    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][5]==1) {
            for (int i = 0; i < net_block[b][4]; i++) {
                for (int j = 0; j < net_block[b][2]; j++) {
                    fscanf(fp, "%f", &freep[ wptr+i+j*net_block[b][4] ]);
                }
            }
            wptr += net_block[b][2]*net_block[b][4];

            // biases of the receiving neurons
            int biasptr = 0;
            for (int j = 0; j < net_block[b][1]; j++) {
                if (neuronbias[j]) {
                    biasptr++;
                }
            }
            for (int i = 0; i < net_block[b][2]; i++) {
                fscanf(fp, "%f", &freep[biasptr++]);
            }
        } else if (net_block[b][0]==0) {
            wptr += net_block[b][2]*net_block[b][4];
        }
    }

    fclose(fp);
    return 0;
}
int Evonet::exportWeightsToMATLABFile(char *path)
{
    FILE *fp = fopen(path, "w");
    if (!fp) {
        return -1;
    }

    int b;
    int wptr = 0;
    for (int i = 0; i < nneurons; i++) {
        wptr += (neuronbias[i]==1);
    }

    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][5]==1) {
            for (int i = 0; i < net_block[b][4]; i++) {
                for (int j = 0; j < net_block[b][2]; j++) {
                    fprintf(fp, "%f\n", freep[ wptr+i+j*net_block[b][4] ]);
                }
            }
            wptr += net_block[b][2]*net_block[b][4];

            // biases of the receiving neurons
            int biasptr = 0;
            for (int j = 0; j < net_block[b][1]; j++) {
                if (neuronbias[j]) {
                    biasptr++;
                }
            }
            for (int i = 0; i < net_block[b][2]; i++) {
                fprintf(fp, "%f\n", freep[biasptr++]);
            }
        } else if (net_block[b][0]==0) {
            wptr += net_block[b][2]*net_block[b][4];
        }
    }

    fclose(fp);
    return 0;
}
void Evonet::setNeckReflex()
{
    // skip the bias parameters to reach the connection weights
    int p = 0;
    for (int i = 0; i < nneurons; i++) {
        p += neuronbias[i]==1;
    }
    // ...
}

#if defined(_MSC_VER)
#pragma warning(pop)
#endif