#include "configurationhelper.h"
#include "mathutils.h"
#include <Eigen/Dense>
#pragma warning(disable:4996)
#if !defined(_MSC_VER) || _MSC_VER > 1600
    freep = new float[1000];
    backpropfreep = new float[1000];
    teachingInput.fill(0.0, MAXN);
    selectedp = (float **) malloc(100 * sizeof(float *));
    for (int i = 0; i < MAXN; i++) {
    nextStoredActivation = 0;
    firstStoredActivation = 0;
    if ( netFile != "" && (nSensors+nHiddens+nMotors) > 0 ) {
        Logger::error( "Evonet - The information inside netFile will override any specification in all other parameters of Evonet" );
    if ( netFile.isEmpty() ) {
        nneurons = ninputs + nhiddens + noutputs;
        if (this->nneurons > MAXN) {
        int inputNeuronType = 0;
        if ( str == QString("no_delta") ) {
        } else if ( str == QString("with_delta") ) {
        int hiddenNeuronType = 0;
        if ( str == QString("logistic") ) {
            hiddenNeuronType = 0;
        } else if ( str == QString("logistic+delta") ) {
            hiddenNeuronType = 1;
        } else if ( str == QString("binary") ) {
            hiddenNeuronType = 2;
        } else if ( str == QString("logistic_0.2") ) {
            hiddenNeuronType = 3;
        int outputNeuronType = 0;
        if ( str == QString("no_delta") ) {
            outputNeuronType = 0;
        } else if ( str == QString("with_delta") ) {
            outputNeuronType = 1;
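        // Neuron type codes produced by the parsing above and consumed by the
        // update switch in updateNet() further below:
        //   0 = plain logistic ("no_delta" / "logistic")
        //   1 = delta/leaky neuron ("with_delta" / "logistic+delta")
        //   2 = binary threshold ("binary")
        //   3 = logistic with slope 0.2 ("logistic_0.2")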
        create_net_block( inputNeuronType, hiddenNeuronType, outputNeuronType, recurrentHiddens, inputOutputConnections, recurrentOutputs, biasOnHidden, biasOnOutput );
    freep = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
    delete[] backpropfreep;
    backpropfreep = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
        backpropfreep[r] = 0.0f;
    phep = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
    muts = new float[nparameters+1000];
    for (int r = 0; r < nparameters; r++)
    if ( !netFile.isEmpty() ) {
        QFileInfo fileNet( netFile );
        QString filePhe = fileNet.baseName() + ".phe";
    for (int i = 0; i < nhiddens; i++) {
        sprintf(neuronl[ninputs+i], "h%d", i);
    if ( netFile.isEmpty() ) {
        params.createParameter( prefix, "nSensors", QString::number(ninputs) );
        params.createParameter( prefix, "nHidden", QString::number(nhiddens) );
        params.createParameter( prefix, "nMotors", QString::number(noutputs) );
    params.createParameter( prefix, "weightRange", QString::number(wrange) );
    params.createParameter( prefix, "gainRange", QString::number(grange) );
    params.createParameter( prefix, "biasRange", QString::number(brange) );
    d.describeString( "netFile" ).help( "The .net file defining the architecture to load. WARNING: when this parameter is specified, all other parameters are ignored" );
    d.describeReal( "weightRange" ).def(5.0f).limits(1,+Infinity).help( "The synaptic weights of the neural network can only assume values in [-weightRange, +weightRange]" );
    d.describeEnum( "inputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of input neurons when the network is auto-generated");
    d.describeEnum( "hiddenNeuronType" ).def("logistic").values( QStringList() << "logistic" << "logistic+delta" << "binary" << "logistic_0.2" ).help( "The type of hidden neurons when the network is auto-generated");
    d.describeEnum( "outputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of output neurons when the network is auto-generated");
    d.describeBool( "recurrentHiddens" ).def(false).help( "when true, generates a network with recurrent hidden neurons");
    d.describeBool( "inputOutputConnections" ).def(false).help( "when true, generates a network with input-output connections in addition to input-hidden-output connections");
    d.describeBool( "recurrentOutputs" ).def(false).help( "when true, generates a network with recurrent output neurons");
    d.describeBool( "biasOnHiddenNeurons" ).def(false).help( "when true, generates a network whose hidden neurons have a bias");
    d.describeBool( "biasOnOutputNeurons" ).def(false).help( "when true, generates a network whose output neurons have a bias");
    d.describeBool( "showTeachingInput" ).def(false).help( "Whether the teaching input should be shown in the UI");
    return new EvonetUI( this, &neuronsMonitorUploader );
void Evonet::create_net_block( int inputNeuronType, int hiddenNeuronType, int outputNeuronType, bool recurrentHiddens, bool inputOutputConnections, bool recurrentOutputs, bool biasOnHidden, bool biasOnOutput )
    for (i = 0; i < this->ninputs; i++) {
        this->neurontype[i] = inputNeuronType;
    for (i = this->ninputs; i < (this->nneurons - this->noutputs); i++) {
        this->neurontype[i] = hiddenNeuronType;
        neuronbias[i] = (biasOnHidden) ? 1 : 0;
    for (i = (this->nneurons - this->noutputs); i < this->nneurons; i++) {
        this->neurontype[i] = outputNeuronType;
        neuronbias[i] = (biasOnOutput) ? 1 : 0;
    for (i = 0; i < this->nneurons; i++) {
        this->neurongain[i] = 0;
    this->net_nblocks = 0;
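    // Each net_block row holds six integers describing one block:
    //   [0] block type: 0 = connections, 1 = update, 2 = gain, 3 = modulated gain
    //       (the same codes written as comments in the .net file below)
    //   [1] first receiving neuron,  [2] number of receiving neurons
    //   [3] first sending neuron,    [4] number of sending neurons
    //   [5] flag checked by the training code (net_block[b][5] == 1 marks the
    //       block as trainable by backpropagation)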
    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = 0;
    this->net_block[this->net_nblocks][2] = this->ninputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_block[this->net_nblocks][5] = 0;

    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_block[this->net_nblocks][5] = 0;

    if (recurrentHiddens) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = this->ninputs;
        this->net_block[this->net_nblocks][4] = this->nhiddens;
        this->net_block[this->net_nblocks][5] = 0;

    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 1;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = 0;
        this->net_block[this->net_nblocks][5] = 0;

    if (this->nhiddens == 0 || inputOutputConnections) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_block[this->net_nblocks][5] = 0;

    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = this->ninputs;
        this->net_block[this->net_nblocks][4] = this->nhiddens;
        this->net_block[this->net_nblocks][5] = 0;

    if (recurrentOutputs) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][4] = this->noutputs;
        this->net_block[this->net_nblocks][5] = 0;

    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
    this->net_block[this->net_nblocks][2] = this->noutputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_block[this->net_nblocks][5] = 0;
    if (this->ninputs > this->noutputs) {
        startx = ((this->noutputs - this->ninputs) / 2) * dx + 50;
    for (i = 0; i < this->ninputs; i++, n++) {
        this->neuronxy[n][0] = (i * dx) + startx;
        this->neuronxy[n][1] = 400;
    startx = this->ninputs * dx;
    for (i = 0; i < (this->nneurons - (this->ninputs + this->noutputs)); i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 225;
    if (this->ninputs > this->noutputs) {
        startx = ((this->ninputs - this->noutputs) / 2) * dx + 50;
    for (i = 0; i < this->noutputs; i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 50;
    for (i = 0; i < this->nneurons; i++) {
    drawnymax = 400 + 30;
    for (i = 0, drawnxmax = 0; i < nneurons; i++) {
        if (neuronxy[i][0] > drawnxmax) {
            drawnxmax = neuronxy[i][0];
    const int bufferSize = 128;
    char cbuffer[bufferSize];
    if ((fp = fopen(filename, "r")) != NULL)
        fscanf(fp, "ARCHITECTURE\n");
        fscanf(fp, "nneurons %d\n", &nneurons);
        fscanf(fp, "nsensors %d\n", &ninputs);
        fscanf(fp, "nmotors %d\n", &noutputs);
        Logger::error( "Evonet - increase MAXN to support more than " + QString::number(MAXN) + " neurons" );
        nhiddens = nneurons - (ninputs + noutputs);
        fscanf(fp, "nblocks %d\n", &net_nblocks);
        for (b = 0; b < net_nblocks; b++)
            fscanf(fp, "%d %d %d %d %d %d", &net_block[b][0], &net_block[b][1], &net_block[b][2], &net_block[b][3], &net_block[b][4], &net_block[b][5]);
            if (net_block[b][0] == 0)
                fscanf(fp, " // connections block\n");
            if (net_block[b][0] == 1)
                fscanf(fp, " // block to be updated\n");
            if (net_block[b][0] == 2)
                fscanf(fp, " // gain block\n");
            if (net_block[b][0] == 3)
                fscanf(fp, " // modulated gain block\n");
        fscanf(fp, "neurons bias, delta, gain, xy position, display\n");
        for (n = 0; n < nneurons; n++)
            fscanf(fp, "%d %d %d %d %d %d\n", &neuronbias[n], &neurontype[n], &neurongain[n], &neuronxy[n][0], &neuronxy[n][1], &neurondisplay[n]);
            if (drawnxmax < neuronxy[n][0])
                drawnxmax = neuronxy[n][0];
            if (drawnymax < neuronxy[n][1])
                drawnymax = neuronxy[n][1];
        fscanf(fp, "FREE PARAMETERS %d\n", &np);
        if (nparameters != np) {
            Logger::error(QString("ERROR: the architecture defines %1 parameters while %2 contains %3 parameters").arg(nparameters).arg(filename).arg(np));
        while (fgets(cbuffer, bufferSize, fp) != NULL && i < np)
            QString line = cbuffer;
            QStringList lineContent = line.split(QRegExp("\\s+"), QString::SkipEmptyParts);
            bool floatOnSecondPlace = false;
            lineContent[1].toFloat(&floatOnSecondPlace);
            if (lineContent.contains("*") || floatOnSecondPlace)
        Logger::info( "Evonet - loaded file " + QString(filename) );
        Logger::warning( "Evonet - File " + QString(filename) + " not found" );
    *par = line[0].toFloat();
    *par = line[0].toFloat();
    *mut = line[1].toFloat();
    char* default_string = "*\t\t";
    mu[h] = new char[50];
    mu[h] = default_string;
    sprintf(mu[h], "%f", muts[h]);
    p[h] = default_string;
    sprintf(p[h], "%f", freep[h]);
    if ((fp = fopen(filename, "w")) != NULL) {
        fprintf(fp, "ARCHITECTURE\n");
        fprintf(fp, "nneurons %d\n", nneurons);
        fprintf(fp, "nsensors %d\n", ninputs);
        fprintf(fp, "nmotors %d\n", noutputs);
        fprintf(fp, "nblocks %d\n", net_nblocks);
        for (b = 0; b < net_nblocks; b++) {
            fprintf(fp, "%d %d %d %d %d %d", net_block[b][0], net_block[b][1], net_block[b][2], net_block[b][3], net_block[b][4], net_block[b][5]);
            if (net_block[b][0] == 0) {
                fprintf(fp, " // connections block\n");
            } else if (net_block[b][0] == 1) {
                fprintf(fp, " // block to be updated\n");
            } else if (net_block[b][0] == 2) {
                fprintf(fp, " // gain block\n");
            } else if (net_block[b][0] == 3) {
                fprintf(fp, " // modulated gain block\n");
        fprintf(fp, "neurons bias, delta, gain, xy position, display\n");
        for (n = 0; n < nneurons; n++) {
            fprintf(fp, "%d %d %d %d %d %d\n", neuronbias[n], neurontype[n], neurongain[n], neuronxy[n][0], neuronxy[n][1], neurondisplay[n]);
        fprintf(fp, "FREE PARAMETERS %d\n", nparameters);
        for (i = 0; i < nneurons; i++) {
            if (neurongain[i] == 1) {
                fprintf(fp, "%s \t %s \tgain %s\n", *p, *mu, neuronl[i]);
        for (i = 0; i < nneurons; i++) {
            if (neuronbias[i] == 1) {
                fprintf(fp, "%s \t %s \tbias %s\n", *p, *mu, neuronl[i]);
        for (b = 0; b < net_nblocks; b++) {
            if (net_block[b][0] == 0) {
                for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                    for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                        fprintf(fp, "%s \t %s \tweight %s from %s\n", *p, *mu, neuronl[t], neuronl[i]);
            } else if (net_block[b][0] == 1) {
                for (t = net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
                    if (neurontype[t] == 1) {
                        if (*p != default_string) {
                            timeC = fabs(timeC) / wrange;
                        fprintf(fp, "%s \t %s \ttimeconstant %s (%f)\n", *p, *mu, neuronl[t], timeC);
    Logger::info( "Evonet - controller saved on file " + QString(filename) );
    Logger::error( "Evonet - unable to create the file " + QString(filename) );
    return ((float) (1.0 / (1.0 + exp(0.0 - f))));
    return 2.0 / (1.0 + exp(-2.0 * f)) - 1.0;
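// The two returns above come from the activation helpers: the first is the
// logistic sigmoid 1/(1+e^-f) with range (0,1); the second is tanh(f) written
// as 2/(1+e^(-2f))-1, with range (-1,1).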
705 for(i=0;i < nneurons;i++) {
709 for(i=0;i < nneurons;i++) {
710 if (neurongain[i] == 1) {
715 for(i=0;i < nneurons;i++) {
716 if (neuronbias[i] == 1) {
721 for(i=0;i < nneurons;i++) {
722 if (neurontype[i] == 1) {
727 for (b=0; b < net_nblocks; b++) {
729 if (net_block[b][0] == 0) {
730 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
731 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
739 for(i=0;i < nneurons;i++) {
740 if (updated[i] < 1 && nwarnings == 0) {
741 Logger::warning(
"Evonet - neuron " + QString::number(i) +
" will never be activated according to the current architecture" );
744 if (updated[i] > 1 && nwarnings == 0) {
745 Logger::warning(
"Evonet - neuron " + QString::number(i) +
" will be activated more than once according to the current architecture" );
    float netinput[MAXN];
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) {
            gain[i] = (float) (fabs((double) *p) / wrange) * grange;
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) {
            netinput[i] = ((double) *p / wrange) * brange;
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    netinput[t] += act[i] * gain[i] * *p;
        if (net_block[b][0] == 2) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                gain[t] = gain[net_block[b][1]];
        if (net_block[b][0] == 3) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                gain[t] = act[net_block[b][3]];
        if (net_block[b][0] == 1) {
            for (t = net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
                switch (neurontype[t]) {
                    delta = (float) (fabs((double) *p) / wrange);
                    act[t] = (act[t] * delta) + (input[t] * (1.0f - delta));
                switch (neurontype[t]) {
                    delta = (float) (fabs((double) *p) / wrange);
                    act[t] = (act[t] * delta) + (logistic(netinput[t]) * (1.0f - delta));
                    if (netinput[t] >= 0.0) {
                    act[t] = logistic(netinput[t] * 0.2f);
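                // Type-1 ("delta") neurons above blend the previous activation
                // with the new drive:
                //   act_t = act_{t-1} * delta + f(net_t) * (1 - delta)
                // where delta = |p| / wrange lies in [0, 1] and acts as a
                // per-neuron time constant.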
    memcpy(storedActivations[nextStoredActivation], act, nneurons * sizeof(float));
    if (firstStoredActivation == nextStoredActivation) {
883 d->activations =
true;
886 d->data.reserve(nneurons);
887 d->data.resize(nneurons);
890 std::copy(act, &(act[nneurons]), d->data.begin());
893 d->updatesCounter = updatescounter;
899 if (inp>=ninputs || inp<0) {
911 return act[ninputs+nhiddens+out];
916 return this->input[in];
927 for (i = 0; i <
MAXN; i++) {
938 act[this->ninputs+nh] = val;
944 if(h<nhiddens && h>=0) {
945 return act[this->ninputs+h];
953 return this->nparameters;
976 *p = wrange - ((float)dt[i]/geneMaxValue)*wrange*2;
996 pheGene[i] = (int)((wrange - phep[i])*geneMaxValue/(2*wrange));
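    // The two mappings above are inverses of each other: a gene g in
    // [0, geneMaxValue] maps linearly onto [-wrange, +wrange] (g = 0 gives
    // +wrange, g = geneMaxValue gives -wrange), and pheGene recovers the gene
    // value from a phenotype parameter phep[i].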
    for (int in = 0; in < this->ninputs; in++) {
        output += QString("%1 ").arg(this->input[in], 0, 'f', 10);
    for (int hi = this->ninputs; hi < (this->nneurons - this->noutputs); hi++) {
        output += QString("%1 ").arg(this->act[hi], 0, 'f', 10);
    for (int out = 0; out < this->noutputs; out++) {
        output += QString("%1 ").arg(this->act[this->ninputs + this->nhiddens + out], 0, 'f', 10);
1025 if (nbias<nparambias && nbias>-1) {
1026 pb=(int) freep[nparambias+nbias];
    Logger::info( "Evonet - ninputs " + QString::number(this->ninputs));
    Logger::info( "Evonet - nhiddens " + QString::number(this->nhiddens));
    Logger::info( "Evonet - noutputs " + QString::number(this->noutputs));
    Logger::info( "Evonet - nneurons " + QString::number(this->nneurons));
    for (int i = 0; i < this->net_nblocks; i++) {
        Logger::info( QString( "Evonet Block - %1 | %2 - %3 -> %4 - %5 | %6" )
                      .arg(net_block[i][0])
                      .arg(net_block[i][1])
                      .arg(net_block[i][2])
                      .arg(net_block[i][3])
                      .arg(net_block[i][4])
                      .arg(net_block[i][5]));
    if (firstStoredActivation == nextStoredActivation) {
    const int ret = firstStoredActivation;
    return storedActivations[ret];
    return updatescounter;
    printf("%.2f ", freep[i]);
    float range = max - min;
    freep[i] = (((float) rand()) / RAND_MAX) * range + min;
    const float brange = maxBias - minBias;
    const float wrange = maxWeight - minWeight;
    for (int i = 0; i < nneurons; ++i) {
        if (neurongain[i] == 1) {
            *(p++) = (((float) rand()) / float(RAND_MAX)) * brange + minBias;
    for (int i = 0; i < nneurons; ++i) {
        if (neuronbias[i] == 1) {
            *(p++) = (((float) rand()) / float(RAND_MAX)) * brange + minBias;
    *p = (((float) rand()) / float(RAND_MAX)) * wrange + minWeight;
    double beta = 0.7 * pow(nhiddens, 1.0 / ninputs);
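    // This resembles the Nguyen-Widrow initialization heuristic: beta =
    // 0.7 * H^(1/I) for H hidden and I input units; below, the incoming weight
    // vector of each hidden neuron is rescaled by k = beta / norm.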
    for (int i = 0; i < nhiddens; i++) {
        for (int j = 0; j < ninputs; j++) {
    if (neuronbias[i + ninputs]) {
        for (int j = 0; j < i + ninputs; j++) {
    norm += freep[ptr] * freep[ptr];
    double k = beta / norm;
    for (int i = 0; i < nhiddens; i++) {
        for (int j = 0; j < ninputs; j++) {
    if (neuronbias[i + ninputs]) {
        for (int j = 0; j < i + ninputs; j++) {
    for (int i = 0; i < nneurons; i++) {
        if (neuronbias[i]) {
    for (int b = 0; b < net_nblocks; b++) {
        for (int i = 0; i < net_block[b][2] * net_block[b][4]; i++) {
            if (net_block[b][0] == 0 && net_block[b][5] == 1) {
            } else if (net_block[b][0] == 0 && net_block[b][5] == 0) {
    for (int i = 0; i < nneurons; i++)
#define bpdebug(x,...) printf(x,##__VA_ARGS__)
#define debug(x,...) printf(x,##__VA_ARGS__)
#define bpdebug(x,...)
#define debug(x,...)
#define inRange(x,y,z) ( (x) >= (y) && (x) < (y)+(z) )
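// inRange(x,y,z) tests whether x falls in the half-open interval [y, y+z);
// it is used below to check whether a neuron index belongs to a block.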
int Evonet::isHidden(int neuron) {
    return neuron >= ninputs && neuron < ninputs + nhiddens;
    for (int i = 0; i < nneurons; i++) {
        printf("act[%d]: %f\n", i, act[i]);
    int size = trainingSet.size() / ninputs;
    for (int i = 0; i < nneurons; i++) {
    for (int i = 0; i < size; i++) {
        for (int j = 0; j < ninputs; j++) {
            setInput(j, trainingSet[i * ninputs + j]);
        for (int j = 0; j < noutputs; j++) {
            if (!outputsToTrain[j])
            tmp = desiredOutput[ptr++] - act[j + ninputs + nhiddens];
            err += tmp * tmp * err_weights[j] * err_weights[j];
    return err / (err_weight_sum * size);
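    // The value returned above is a weighted mean squared error over the whole
    // training set: each output error is scaled by err_weights[j]^2 and the
    // total is normalized by err_weight_sum * (number of patterns).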
    int wPtr = 0, paramPtr = 0;
    for (int i = 0; i < nneurons; i++) {
        nbiases += (neuronbias[i] == 1);
    for (int b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] != 0)
        if (net_block[b][5] == 1 && !(net_block[b][1] >= ninputs + nhiddens)) {
            for (int i = net_block[b][1]; i < net_block[b][1] + net_block[b][2]; i++) {
                if (neuronbias[i]) {
                    for (int j = 0; j < i; j++) {
                    debug("Adding bias of neuron %d (freep[%d]) in w[%d]\n", i, ptr, wPtr);
                    w[wPtr++] = freep[ptr];
                for (int j = 0; j < net_block[b][4]; j++) {
                    debug("Adding connection %d of neuron %d (freep[%d]) in w[%d]\n", j, i, paramPtr, wPtr);
                    w[wPtr++] = freep[paramPtr++];
            paramPtr += net_block[b][2] * net_block[b][4];
    for (int i = 0; i < noutputs; i++) {
        if (!outputsToTrain[i])
        int i_freep = i + ninputs + nhiddens;
        if (neuronbias[i_freep]) {
            for (int j = 0; j < i_freep; j++) {
            debug("Adding bias of output %d (freep[%d]) in w[%d]\n", i, ptr, wPtr);
            w[wPtr++] = freep[ptr];
        for (int b = 0; b < net_nblocks; b++) {
            debug("Accessing trainingHiddenBlock[net_block[%d][3] = %d][%d]\n", b, net_block[b][3], i);
            if (!(trainingHiddenBlock[net_block[b][3]][i] && inRange(net_block[b][1], ninputs + nhiddens, noutputs))) {
                paramPtr += net_block[b][2] * net_block[b][4];
                debug("\tparamPtr: %d\n", paramPtr);
            for (int j = 0; j < net_block[b][4]; j++) {
                debug("Adding connection %d of output %d (freep[%d]) in w[%d]\n", j, i_freep, (i_freep - net_block[b][1]) * net_block[b][4] + paramPtr, wPtr);
                w[wPtr++] = freep[(i_freep - net_block[b][1]) * net_block[b][4] + paramPtr++];
    for (int i = 0; i < w.size(); i++) {
    int wPtr = 0, paramPtr = 0;
    for (int i = 0; i < nneurons; i++) {
        nbiases += (neuronbias[i] == 1);
    for (int b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] != 0)
        if (net_block[b][5] == 1 && !(net_block[b][1] >= ninputs + nhiddens)) {
            for (int i = net_block[b][1]; i < net_block[b][1] + net_block[b][2]; i++) {
                if (neuronbias[i]) {
                    for (int j = 0; j < i; j++) {
                    debug("Adding bias of neuron %d (w[%d]) in freep[%d]\n", i, wPtr, ptr);
                    freep[ptr] = w[wPtr++];
                for (int j = 0; j < net_block[b][4]; j++) {
                    debug("Adding connection %d of neuron %d (w[%d]) in freep[%d]\n", j, i, wPtr, paramPtr);
                    freep[paramPtr++] = w[wPtr++];
            paramPtr += net_block[b][2] * net_block[b][4];
    for (int i = 0; i < noutputs; i++) {
        if (!outputsToTrain[i])
        int i_freep = i + ninputs + nhiddens;
        if (neuronbias[i_freep]) {
            for (int j = 0; j < i_freep; j++) {
            debug("Adding bias of output %d (w[%d]) in freep[%d]\n", i, wPtr, ptr);
            freep[ptr] = w[wPtr++];
        for (int b = 0; b < net_nblocks; b++) {
            if (!(trainingHiddenBlock[net_block[b][3]][i] && inRange(net_block[b][1], ninputs + nhiddens, noutputs))) {
                paramPtr += net_block[b][2] * net_block[b][4];
            for (int j = 0; j < net_block[b][4]; j++) {
                debug("Adding connection %d of output %d (w[%d]) in freep[%d]\n", j, i_freep, wPtr, (i_freep - net_block[b][1]) * net_block[b][4] + paramPtr);
                freep[(i_freep - net_block[b][1]) * net_block[b][4] + paramPtr++] = w[wPtr++];
    debug("Getting w to %d from %d\n", to, from);
    for (int i = 0; i < nneurons; i++) {
        ptr += neuronbias[i] == 1;
    for (int b = 0; b < net_nblocks; b++) {
        if (inRange(to, net_block[b][1], net_block[b][2]) && inRange(from, net_block[b][3], net_block[b][4])) {
            ptr += (to - net_block[b][1]) * net_block[b][4] + (from - net_block[b][3]);
        ptr += net_block[b][2] * net_block[b][4];
    debug("Returning freep[%d]\n", ptr);
    for (int i = 0; i < nneurons; i++) {
        ptr += neuronbias[i] == 1;
    for (int b = 0; b < net_nblocks; b++) {
        if (inRange(to, net_block[b][1], net_block[b][2]) && inRange(from, net_block[b][3], net_block[b][4])) {
            ptr += (to - net_block[b][1]) * net_block[b][4] + (from - net_block[b][3]);
        ptr += net_block[b][2] * net_block[b][4];
float Evonet::derivative(int, float x) {
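    // For the logistic activation the derivative can be written in terms of
    // the activation itself, f'(net) = a * (1 - a); the training code below
    // passes act[] values in as x, consistent with that form.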
    outputsToTrain = (char *) calloc(noutputs, sizeof(char));
    n_outputsToTrain = 0;
    trainingHiddenBlock = (char **) calloc(nneurons, sizeof(char *));
    for (int i = 0; i < nneurons; i++) {
        trainingHiddenBlock[i] = (char *) calloc(noutputs, sizeof(char));
    for (int b = 0; b < net_nblocks; b++)
        if (net_block[b][0] == 0 && net_block[b][5] == 1) {
            nconnections += (net_block[b][2] * net_block[b][4]);
            for (int i = net_block[b][1]; i < net_block[b][1] + net_block[b][2]; i++) {
                nconnections += (neuronbias[i] == 1);
            if (net_block[b][1] >= ninputs + nhiddens) {
                memset(outputsToTrain + net_block[b][1] - ninputs - nhiddens, 1, net_block[b][2] * sizeof(char));
                n_outputsToTrain += net_block[b][2];
            for (int j = 0; j < net_block[b][4]; j++)
                memset(&trainingHiddenBlock[net_block[b][3] + j][net_block[b][1] - ninputs - nhiddens], 1, net_block[b][2] * sizeof(char));
    printf("n_outputsToTrain: %d\n", n_outputsToTrain);
    printf("output to train: ");
    for (int i = 0; i < noutputs; i++) {
        printf("%d ", outputsToTrain[i]);
    for (int j = 0; j < nneurons; j++) {
        for (int i = 0; i < noutputs; i++) {
            printf("%d ", trainingHiddenBlock[j][i]);
    debug("nconnections: %d\n", nconnections);
    for (int i = 0; i < err_w.size(); i++) {
        err_weights.push_back(err_w[i] * err_w[i]);
        err_weight_sum += err_w[i];
    printf("err_weight_sum : %f\n", err_weight_sum);
    free(outputsToTrain);
    for (int i = 0; i < nneurons; i++)
        free(trainingHiddenBlock[i]);
    free(trainingHiddenBlock);
    return teachingInput[id];
    return backproperror;
    teachingInput = tInput;
    Eigen::MatrixXf weightMatrix(MAXN, MAXN);
    float global_error = 0.0;
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) {
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) {
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    weightMatrix(i, t) = *p;
    for (i = ninputs + nhiddens; i < nneurons; i++) {
        diff = (tInput[i - (ninputs + nhiddens)] - act[i]);
        delta[i] = diff * act[i] * ((float) 1.0 - act[i]);
        global_error += diff * diff;
    for (i = ninputs; i < ninputs + nhiddens; i++) {
        for (t = ninputs + nhiddens; t < nneurons; t++) {
            temp += delta[t] * (weightMatrix(i, t));
        delta[i] = ((float) 1.0 - act[i]) * act[i] * temp;
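    // Standard backpropagation deltas, as computed above:
    //   output neurons: delta_o = (t_o - a_o) * a_o * (1 - a_o)
    //   hidden neurons: delta_h = a_h * (1 - a_h) * sum_o( w_ho * delta_o )
    // The loops below then apply w += rate * delta * presynaptic activation,
    // with the bias treated as a constant input of 1.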
1977 for(i = 0; i < nneurons; i++) {
1978 if (neurongain[i] == 1) {
1984 for(i = 0; i < nneurons; i++) {
1985 if (neuronbias[i] == 1) {
1986 float dp_rate = delta[i] * rate;
1988 *p += (float) 1.0 * dp_rate;
1994 for (b=0; b < net_nblocks; b++) {
1995 if (net_block[b][0] == 0) {
1996 for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
1997 for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
1999 float dp_rate = delta[t] * rate;
2001 *p += act[i] * dp_rate;
2009 backproperror = global_error;
2012 if (showTInput && (neuronsMonitorUploader.
getDownloader() != NULL)) {
2016 d->activations =
false;
2019 d->data.reserve(nneurons);
2021 d->data.append(backproperror);
2024 d->updatesCounter = updatescounter;
2027 return global_error;
    teachingInput = tInput;
    Eigen::MatrixXf weightMatrix(MAXN, MAXN);
    float global_error = 0.0;
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) {
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) {
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    weightMatrix(i, t) = *p;
    for (i = ninputs + nhiddens; i < nneurons; i++) {
        diff = (tInput[i - (ninputs + nhiddens)] - act[i]);
        delta[i] = diff * act[i] * ((float) 1.0 - act[i]);
        global_error += diff * diff;
    for (i = ninputs; i < ninputs + nhiddens; i++) {
        for (t = ninputs + nhiddens; t < nneurons; t++) {
            temp += delta[t] * (weightMatrix(i, t));
        delta[i] = ((float) 1.0 - act[i]) * act[i] * temp;
    for (i = 0; i < nneurons; i++) {
        if (neurongain[i] == 1) {
    for (i = 0; i < nneurons; i++) {
        if (neuronbias[i] == 1) {
            float dp_rate = delta[i] * rate;
            *bp += (float) 1.0 * dp_rate;
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][0] == 0) {
            for (t = net_block[b][1]; t < net_block[b][1] + net_block[b][2]; t++) {
                for (i = net_block[b][3]; i < net_block[b][3] + net_block[b][4]; i++) {
                    float dp_rate = delta[t] * rate;
                    *bp += act[i] * dp_rate;
    backproperror = global_error;
    if (showTInput && (neuronsMonitorUploader.getDownloader() != NULL)) {
        d->activations = false;
        d->data.reserve(nneurons);
        d->data.append(backproperror);
        d->updatesCounter = updatescounter;
    return global_error;
    float global_error = 0.0;
    for (i = ninputs + nhiddens; i < nneurons; i++) {
        diff = (tInput[i - (ninputs + nhiddens)] - act[i]);
        global_error += diff * diff;
    backproperror = global_error;
    for (i = 0; i < nparameters; i++)
    origp = backpropfreep;
    for (i = 0; i < nparameters; i++)
    double lambda = 0.001;
    double currentError = 0, previousError = 0;
    if (nconnections == 0) {
        printf("nconnections: 0\nnothing to train\n");
    for (i = 0; i < nneurons; i++) {
        nbiases += neuronbias[i] == 1;
    int size = trainingSet.size() / ninputs;
    debug("npatterns: %d\n", size);
    Eigen::VectorXf err(size * n_outputsToTrain);
    Eigen::MatrixXf jacobian(size * n_outputsToTrain, nconnections);
    Eigen::MatrixXf jj(nconnections, nconnections);
    Eigen::VectorXf new_weights(nconnections);
    Eigen::VectorXf old_weights(nconnections);
    Eigen::VectorXf ww_err(nconnections);
    printf("Initial error: %f\n", previousError);
    for (cycles = 0; cycles < end; cycles++) {
        debug("weights extracted\n");
        for (pattern = 0; pattern < size; pattern++) {
            debug("\n\n------------\n\n");
            debug("\tpattern: %d\n", pattern);
            for (i = 0; i < ninputs; i++) {
                setInput(i, trainingSet[pattern * ninputs + i]);
            for (int m = noutputs - 1; m >= 0; m--) {
                if (!outputsToTrain[m])
                int m_freep = m + ninputs + nhiddens;
                int col_idx = nconnections - 1;
                int row_idx = n_outputsToTrain * pattern - 1;
                for (i = 0; i <= m; i++) {
                    row_idx += outputsToTrain[i];
                err[row_idx] = (desiredOutput[row_idx] - act[m_freep]) * err_weights[m];
                delta = -derivative(m_freep, act[m_freep]) * err_weights[m];
                for (i = noutputs - 1; i >= 0; i--) {
                    if (!outputsToTrain[i])
                    for (b = net_nblocks - 1; b >= 0; b--) {
                        if (trainingHiddenBlock[net_block[b][3]][m] && net_block[b][5] == 1) {
                            for (j = net_block[b][3] + net_block[b][4] - 1; j >= net_block[b][3]; j--) {
                                jacobian(row_idx, col_idx--) = delta * act[j];
                                debug("\t\tcol_idx: %d\n", col_idx + 1);
                                debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx + 1, delta, act[j], delta * act[j]);
                            jacobian(row_idx, col_idx--) = 0;
                            debug("\t\tcol_idx: %d\n", col_idx + 1);
                            debug("\t\tjacobian(%d,%d) = 0\n", row_idx, col_idx + 1);
                    if (neuronbias[i + ninputs + nhiddens]) {
                        debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta);
                        jacobian(row_idx, col_idx--) = (i == m ? delta : 0);
    debug("\nBackward computation: hidden layer\n");
    for (b = net_nblocks - 1; b >= 0; b--) {
        debug("\ttrainingHiddenBlock[%d][%d]: %d\n", net_block[b][1], m, trainingHiddenBlock[net_block[b][1]][m]);
        if (net_block[b][0] != 0 || net_block[b][5] != 1 || !trainingHiddenBlock[net_block[b][1]][m])
        for (j = net_block[b][1] + net_block[b][2] - 1; j >= net_block[b][1]; j--) {
            double delta_h = delta * getWeight(m_freep, j) * derivative(j, act[j]);
            for (int k = net_block[b][3] + net_block[b][4] - 1; k >= net_block[b][3]; k--) {
                jacobian(row_idx, col_idx--) = delta_h * act[k];
                debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx + 1, delta_h, act[k], delta_h * act[k]);
            if (neuronbias[j]) {
                debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta_h);
                jacobian(row_idx, col_idx--) = delta_h;
    debug("\tAll rows analyzed\n");
    if (lambda > 100000000 || lambda < 0.000001) {
    ww_err = jacobian.transpose() * err;
    jj = jacobian.transpose() * jacobian;
    for (int retry = 0; retry < 6; retry++, lambda *= 10) {
        debug("\tlambda: %f\n", lambda);
        new_weights = old_weights - (jj + lambda * Eigen::MatrixXf::Identity(nconnections, nconnections)).ldlt().solve(ww_err);
    printf("iteration: %d err: %f lambda: %f\n", cycles, currentError, lambda);
    debug("currentError: %f\n", currentError);
    if (currentError <= maxError)
        return currentError;
    if ((new_weights - old_weights).norm() < 0.0001) {
        printf("Minimum gradient reached\n");
        return currentError;
    if (currentError > previousError) {
    previousError = currentError;
    return currentError;
    double lambda = 0.001;
    double currentError = 0, previousError = 0;
    if (nconnections == 0) {
        printf("nconnections: 0\nnothing to train\n");
    for (i = 0; i < nneurons; i++) {
        nbiases += neuronbias[i] == 1;
    int size = trainingSet.size() / ninputs;
    debug("npatterns: %d\n", size);
    Eigen::VectorXf oldActivations(time * nhiddens);
    oldActivations.setZero();
    Eigen::VectorXf err(size * n_outputsToTrain);
    Eigen::MatrixXf jacobian(size * n_outputsToTrain, nconnections);
    Eigen::MatrixXf jj(nconnections, nconnections);
    Eigen::VectorXf new_weights(nconnections);
    Eigen::VectorXf old_weights(nconnections);
    Eigen::VectorXf ww_err(nconnections);
    printf("Initial error: %f\n", previousError);
    for (cycles = 0; cycles < end; cycles++) {
        debug("weights extracted\n");
        for (pattern = 0; pattern < size; pattern++) {
            debug("\n\n------------\n\n");
            debug("\tpattern: %d\n", pattern);
            for (i = 0; i < ninputs; i++) {
                setInput(i, trainingSet[pattern * ninputs + i]);
            for (int m = noutputs - 1; m >= 0; m--) {
                debug("m: %d\n", m);
                if (!outputsToTrain[m])
                int m_freep = m + ninputs + nhiddens;
                int col_idx = nconnections - 1;
                int row_idx = n_outputsToTrain * pattern - 1;
                debug("row_idx: %d\n", row_idx);
                for (i = 0; i <= m; i++) {
                    row_idx += outputsToTrain[i];
                debug("row_idx: %d\n", row_idx);
                err[row_idx] = (desiredOutput[row_idx] - act[m_freep]) * err_weights[m];
                delta = -derivative(m_freep, act[m_freep]) * err_weights[m];
                for (i = noutputs - 1; i >= 0; i--) {
                    debug("\toutput: %d\n", i);
                    if (!outputsToTrain[i])
                    for (b = net_nblocks - 1; b >= 0; b--) {
                        if (trainingHiddenBlock[net_block[b][3]][m]) {
                            for (j = net_block[b][3] + net_block[b][4] - 1; j >= net_block[b][3]; j--) {
                                jacobian(row_idx, col_idx--) = delta * act[j];
                                debug("\t\tcol_idx: %d\n", col_idx + 1);
                                debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx + 1, delta, act[j], delta * act[j]);
                            jacobian(row_idx, col_idx--) = 0;
                            debug("\t\tcol_idx: %d\n", col_idx + 1);
                            debug("\t\tjacobian(%d,%d) = 0\n", row_idx, col_idx + 1);
                    if (neuronbias[i + ninputs + nhiddens]) {
                        debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, (i == m ? delta : 0));
                        jacobian(row_idx, col_idx--) = (i == m ? delta : 0);
    debug("\nBackward computation: hidden layer\n");
    for (b = net_nblocks - 1; b >= 0; b--) {
        debug("\ttrainingHiddenBlock[%d][%d]: %d\n", net_block[b][1], m, trainingHiddenBlock[net_block[b][1]][m]);
        if (net_block[b][0] != 0 || !trainingHiddenBlock[net_block[b][1]][m])
#if defined(__GNUC__) && defined(DEVELOPER_WARNINGS)
    #warning The trainLevembergMarquardtThroughTime method requires that all the connections to a particular hidden block are in the same net_block.
        for (j = net_block[b][1] + net_block[b][2] - 1; j >= net_block[b][1]; j--) {
            double delta_h = delta * getWeight(m_freep, j) * derivative(j, act[j]);
            for (int k = net_block[b][3] + net_block[b][4] - 1; k >= net_block[b][3]; k--) {
                jacobian(row_idx, col_idx--) = delta_h * (isHidden(k) ? oldActivations[k - ninputs] : act[k]);
                debug("\t\tjacobian(%d,%d) = %f * %f = %f\n", row_idx, col_idx + 1, delta_h, act[k], delta_h * act[k]);
            if (neuronbias[j]) {
                debug("\t\tjacobian(%d,%d) = %f\n", row_idx, col_idx, delta_h);
                jacobian(row_idx, col_idx--) = delta_h;
    for (int i = 0; i < nhiddens; i++) {
        oldActivations[i] = act[i + ninputs];
    debug("\tAll rows analyzed\n");
    if (lambda > 100000000 || lambda < 0.000001) {
    ww_err = jacobian.transpose() * err;
    jj = jacobian.transpose() * jacobian;
    for (int retry = 0; retry < 6; retry++, lambda *= 10) {
        debug("\tlambda: %f\n", lambda);
        new_weights = old_weights - (jj + lambda * Eigen::MatrixXf::Identity(nconnections, nconnections)).ldlt().solve(ww_err);
    printf("iteration: %d err: %f lambda: %f\n", cycles, currentError, lambda);
    debug("currentError: %f\n", currentError);
    if (currentError <= maxError)
        return currentError;
    if ((new_weights - old_weights).norm() < 0.0001) {
        printf("Minimum gradient reached\n");
        return currentError;
    if (currentError > previousError) {
    previousError = currentError;
    return currentError;
int Evonet::importWeightsFromMATLABFile(char *path) {
    FILE *fp = fopen(path, "r");
    for (int i = 0; i < nneurons; i++) {
        wptr += (neuronbias[i] == 1);
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][5] == 1) {
            for (int i = 0; i < net_block[b][4]; i++) {
                for (int j = 0; j < net_block[b][2]; j++) {
                    fscanf(fp, "%f", &freep[wptr + i + j * net_block[b][4]]);
            wptr += net_block[b][2] * net_block[b][4];
            for (int j = 0; j < net_block[b][1]; j++) {
                if (neuronbias[j]) {
            for (int i = 0; i < net_block[b][2]; i++) {
                fscanf(fp, "%f", &freep[biasptr++]);
        } else if (net_block[b][0] == 0) {
            wptr += net_block[b][2] * net_block[b][4];
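    // Note the index wptr + i + j*net_block[b][4]: the file lists, for each
    // sending neuron, the weights towards every receiving neuron, apparently
    // matching MATLAB's column-major order for a receivers-by-senders matrix,
    // while freep keeps each receiving neuron's incoming weights contiguous.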
int Evonet::exportWeightsToMATLABFile(char *path) {
    FILE *fp = fopen(path, "w");
    for (int i = 0; i < nneurons; i++) {
        wptr += (neuronbias[i] == 1);
    for (b = 0; b < net_nblocks; b++) {
        if (net_block[b][5] == 1) {
            for (int i = 0; i < net_block[b][4]; i++) {
                for (int j = 0; j < net_block[b][2]; j++) {
                    fprintf(fp, "%f\n", freep[wptr + i + j * net_block[b][4]]);
            wptr += net_block[b][2] * net_block[b][4];
            for (int j = 0; j < net_block[b][1]; j++) {
                if (neuronbias[j]) {
            for (int i = 0; i < net_block[b][2]; i++) {
                fprintf(fp, "%f\n", freep[biasptr++]);
        } else if (net_block[b][0] == 0) {
            wptr += net_block[b][2] * net_block[b][4];
    for (int i = 0; i < nneurons; i++) {
        p += neuronbias[i] == 1;
#if defined(_MSC_VER)
#pragma warning(pop)