26 #include "configurationhelper.h"
40 #pragma warning(disable:4996)
// --- Evonet constructor fragment (the enclosing signature is not visible in
// this chunk; the embedded leading numbers are extraction artifacts) ---
// Allocates the free-parameter buffer and the parameter-selection table,
// then clears per-neuron lesion state and the stored-activation indices.
// NOTE(review): magic capacities (1000, 100) and raw new[]/malloc -- confirm
// the matching delete[]/free in the full file.
53 freep =
new float[1000];
// NOTE(review): sizeof(float **) is harmless (same size as float * on common
// platforms) but sizeof(float *) was presumably intended.
58 selectedp= (
float **) malloc(100 *
sizeof(
float **));
// Clear lesion flags and the values lesioned neurons are clamped to.
59 for (
int i = 0; i <
MAXN; i++) {
60 neuronlesion[i]=
false;
61 neuronlesionVal[i]=0.0;
// Reset the stored-activation indices; elsewhere in this file their equality
// is tested, presumably meaning "buffer empty" -- confirm in full file.
65 nextStoredActivation = 0;
66 firstStoredActivation = 0;
// --- Evonet::configure() fragment (lines missing between numbered
// statements) ---
// Builds the network either from a .net architecture file or from the
// individual nSensors/nHidden/nMotors parameters, allocates the parameter
// buffers, and writes the effective settings back into params.
// Specifying both a netFile and explicit neuron counts is reported as an
// error: the file contents take precedence over the other parameters.
76 if ( netFile !=
"" && (nSensors+nHiddens+nMotors)>0 ) {
77 Logger::error(
"Evonet - The information inside netFile will override any specification in all others parameters of Evonet" );
// Auto-generate the architecture only when no .net file was given.
83 if ( netFile.isEmpty() ) {
88 nneurons = ninputs + nhiddens + noutputs;
// Hard cap: the per-neuron arrays are statically sized to MAXN.
89 if (this->nneurons >
MAXN) {
// Translate the textual neuron-type parameters into the integer codes
// stored in neurontype[] (see create_net_block below).
92 int inputNeuronType = 0;
94 if ( str == QString(
"no_delta") ) {
96 }
else if ( str == QString(
"with_delta") ) {
101 int hiddenNeuronType = 0;
103 if ( str == QString(
"logistic") ) {
104 hiddenNeuronType = 0;
105 }
else if ( str == QString(
"logistic+delta") ) {
106 hiddenNeuronType = 1;
107 }
else if ( str == QString(
"binary") ) {
108 hiddenNeuronType = 2;
109 }
else if ( str == QString(
"logistic_0.2") ) {
110 hiddenNeuronType = 3;
114 int outputNeuronType = 0;
116 if ( str == QString(
"no_delta") ) {
117 outputNeuronType = 0;
118 }
else if ( str == QString(
"with_delta") ) {
119 outputNeuronType = 1;
// Fill the net_block table describing the generated architecture.
128 create_net_block( inputNeuronType, hiddenNeuronType, outputNeuronType, recurrentHiddens, inputOutputConnections, recurrentOutputs, biasOnHidden, biasOnOutput );
// Allocate the buffers for weights (freep), phenotype values (phep) and
// mutation rates (muts). NOTE(review): "+1000" slack and raw new[] --
// verify ownership and deallocation in the full file.
139 freep=
new float[nparameters+1000];
140 for(
int r=0;r<nparameters;r++)
144 phep=
new float[nparameters+1000];
145 for(
int r=0;r<nparameters;r++)
149 muts=
new float[nparameters+1000];
150 for(
int r=0;r<nparameters;r++)
// When loading from a .net file, a sibling ".phe" file (same base name)
// may provide evolved phenotype parameters.
153 if ( !netFile.isEmpty() ) {
155 QFileInfo fileNet( netFile );
156 QString filePhe = fileNet.baseName() +
".phe";
// Default GUI labels/ranges/colors for hidden neurons: "h0", "h1", ...
// with display range [0,1] drawn in gray.
166 for(
int i = 0; i < nhiddens; i++) {
167 sprintf(neuronl[ninputs+i],
"h%d", i);
168 neuronrange[ninputs+i][0] = 0.0;
169 neuronrange[ninputs+i][1] = 1.0;
170 neurondcolor[ninputs+i] = QColor(125,125,125);
// Record the effective settings back into the configuration parameters so
// the run can be reproduced from the saved configuration.
176 if ( netFile.isEmpty() ) {
177 params.
createParameter( prefix,
"nSensors", QString::number(ninputs) );
178 params.
createParameter( prefix,
"nHidden", QString::number(nhiddens) );
179 params.
createParameter( prefix,
"nMotors", QString::number(noutputs) );
183 params.
createParameter( prefix,
"weightRange", QString::number(wrange) );
184 params.
createParameter( prefix,
"gainRange", QString::number(grange) );
185 params.
createParameter( prefix,
"biasRange", QString::number(brange) );
// --- Evonet::describe() fragment ---
// Registers the user-visible description of every configuration parameter:
// type, default, allowed values and help text shown by the configuration UI.
// NOTE(review): the help strings contain typos ("synpatic", "generated a
// network"); they are runtime strings, so they are left untouched here.
193 d.
describeString(
"netFile" ).
help(
"The file .net where is defined the architecture to load. WARNING: when this parameter is specified any other parameters will be ignored" );
// weightRange is the half-width of the symmetric weight interval.
194 d.
describeReal(
"weightRange" ).
def(5.0f).
limits(1,+
Infinity).
help(
"The synpatic weight of the neural network can only assume values in [-weightRange, +weightRange]" );
// The three neuron-type enums mirror the string->int mapping in configure().
197 d.
describeEnum(
"inputNeuronType" ).
def(
"no_delta").
values( QStringList() <<
"no_delta" <<
"with_delta" ).
help(
"The type of input neurons when the network is auto generated");
198 d.
describeEnum(
"hiddenNeuronType" ).
def(
"logistic").
values( QStringList() <<
"logistic" <<
"logistic+delta" <<
"binary" <<
"logistic_0.2" ).
help(
"The type of hidden neurons when the network is auto generated");
199 d.
describeEnum(
"outputNeuronType" ).
def(
"no_delta").
values( QStringList() <<
"no_delta" <<
"with_delta" ).
help(
"The type of output neurons when the network is auto generated");
// Topology toggles consumed by create_net_block().
200 d.
describeBool(
"recurrentHiddens" ).
def(
false).
help(
"when true generated a network with recurrent hidden neurons");
201 d.
describeBool(
"inputOutputConnections" ).
def(
false).
help(
"when true generated a network with input-output connections in addition to input-hidden-output connections");
202 d.
describeBool(
"recurrentOutputs" ).
def(
false).
help(
"when true generated a network with recurrent output neurons");
203 d.
describeBool(
"biasOnHiddenNeurons" ).
def(
true).
help(
"when true generate a network with hidden neurons with a bias");
204 d.
describeBool(
"biasOnOutputNeurons" ).
def(
true).
help(
"when true generate a network with output neurons with a bias");
// Builds the default layered architecture in the net_block table when no
// .net file is supplied, and computes the GUI layout of the neurons.
//
// Each net_block row is interpreted elsewhere in this file as:
//   [0] block type: 0 = connections, 1 = neurons to update, 2 = gain,
//       3 = modulated gain (the labels written by save_net_blocks)
//   [1] first target neuron, [2] number of target neurons
//   [3] first source neuron, [4] number of source neurons
//   [5] always set to 0 here; its meaning is not visible in this chunk
//
// Parameters: the three *NeuronType codes are stored into neurontype[];
// the three recurrent*/inputOutputConnections flags enable the optional
// connection blocks; biasOnHidden/biasOnOutput enable per-layer biases.
211 void Evonet::create_net_block(
int inputNeuronType,
int hiddenNeuronType,
int outputNeuronType,
bool recurrentHiddens,
bool inputOutputConnections,
bool recurrentOutputs,
bool biasOnHidden,
bool biasOnOutput )
// Assign per-neuron type codes and bias flags, layer by layer.
219 for(i = 0; i < this->ninputs; i++) {
220 this->neurontype[i]= inputNeuronType;
223 for(i = this->ninputs; i < (this->nneurons - this->noutputs); i++) {
224 this->neurontype[i]= hiddenNeuronType;
225 neuronbias[i] = (biasOnHidden) ? 1 : 0;
227 for(i = (this->nneurons - this->noutputs); i < this->nneurons; i++) {
228 this->neurontype[i]= outputNeuronType;
229 neuronbias[i] = (biasOnOutput) ? 1 : 0;
// No per-neuron gains in the generated architecture.
233 for(i=0; i < this->nneurons; i++) {
234 this->neurongain[i]= 0;
237 this->net_nblocks = 0;
// Update block for the input neurons (type 1, targets 0..ninputs-1).
239 this->net_block[this->net_nblocks][0] = 1;
240 this->net_block[this->net_nblocks][1] = 0;
241 this->net_block[this->net_nblocks][2] = this->ninputs;
242 this->net_block[this->net_nblocks][3] = 0;
243 this->net_block[this->net_nblocks][4] = 0;
244 this->net_block[this->net_nblocks][5] = 0;
// Input -> hidden connections.
248 if (this->nhiddens > 0) {
249 this->net_block[this->net_nblocks][0] = 0;
250 this->net_block[this->net_nblocks][1] = this->ninputs;
251 this->net_block[this->net_nblocks][2] = this->nhiddens;
252 this->net_block[this->net_nblocks][3] = 0;
253 this->net_block[this->net_nblocks][4] = this->ninputs;
254 this->net_block[this->net_nblocks][5] = 0;
// Optional hidden -> hidden recurrent connections.
259 if (recurrentHiddens) {
260 this->net_block[this->net_nblocks][0] = 0;
261 this->net_block[this->net_nblocks][1] = this->ninputs;
262 this->net_block[this->net_nblocks][2] = this->nhiddens;
263 this->net_block[this->net_nblocks][3] = this->ninputs;
264 this->net_block[this->net_nblocks][4] = this->nhiddens;
265 this->net_block[this->net_nblocks][5] = 0;
// Update block for the hidden neurons.
270 if (this->nhiddens > 0) {
271 this->net_block[this->net_nblocks][0] = 1;
272 this->net_block[this->net_nblocks][1] = this->ninputs;
273 this->net_block[this->net_nblocks][2] = this->nhiddens;
274 this->net_block[this->net_nblocks][3] = 0;
275 this->net_block[this->net_nblocks][4] = 0;
276 this->net_block[this->net_nblocks][5] = 0;
// Input -> output connections: always when there is no hidden layer,
// otherwise only if explicitly requested.
281 if (this->nhiddens == 0 || inputOutputConnections) {
282 this->net_block[this->net_nblocks][0] = 0;
283 this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
284 this->net_block[this->net_nblocks][2] = this->noutputs;
285 this->net_block[this->net_nblocks][3] = 0;
286 this->net_block[this->net_nblocks][4] = this->ninputs;
287 this->net_block[this->net_nblocks][5] = 0;
// Hidden -> output connections.
292 if (this->nhiddens > 0) {
293 this->net_block[net_nblocks][0] = 0;
294 this->net_block[net_nblocks][1] = this->ninputs + this->nhiddens;
295 this->net_block[net_nblocks][2] = this->noutputs;
296 this->net_block[net_nblocks][3] = this->ninputs;
297 this->net_block[net_nblocks][4] = this->nhiddens;
298 this->net_block[this->net_nblocks][5] = 0;
// Optional output -> output recurrent connections.
303 if (recurrentOutputs) {
304 this->net_block[this->net_nblocks][0] = 0;
305 this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
306 this->net_block[this->net_nblocks][2] = this->noutputs;
307 this->net_block[this->net_nblocks][3] = this->ninputs + this->nhiddens;
308 this->net_block[this->net_nblocks][4] = this->noutputs;
309 this->net_block[this->net_nblocks][5] = 0;
// Update block for the output neurons.
314 this->net_block[this->net_nblocks][0] = 1;
315 this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
316 this->net_block[this->net_nblocks][2] = this->noutputs;
317 this->net_block[this->net_nblocks][3] = 0;
318 this->net_block[this->net_nblocks][4] = 0;
319 this->net_block[this->net_nblocks][5] = 0;
// GUI layout: inputs drawn on the bottom row (y=400), hiddens in the middle
// (y=225), outputs on top (y=50); the wider layer is left-aligned and the
// narrower one centered. NOTE: some positioning lines (and the declarations
// of n/dx/startx) fall in gaps of this chunk -- confirm in the full file.
325 if (this->ninputs > this->noutputs) {
328 startx = ((this->noutputs - this->ninputs) / 2) * dx + 50;
330 for(i = 0; i < this->ninputs; i++, n++) {
331 this->neuronxy[n][0] = (i * dx) + startx;
332 this->neuronxy[n][1] = 400;
336 startx = this->ninputs * dx;
337 for(i=0; i < (this->nneurons - (this->ninputs + this->noutputs)); i++, n++) {
338 this->neuronxy[n][0] = startx + (i * dx);
339 this->neuronxy[n][1] = 225;
343 if (this->ninputs > this->noutputs) {
344 startx = ((this->ninputs - this->noutputs) / 2) * dx + 50;
348 for(i=0; i < this->noutputs; i++, n++) {
349 this->neuronxy[n][0] = startx + (i * dx);
350 this->neuronxy[n][1] = 50;
// All neurons visible by default.
354 for(i=0; i < this->nneurons; i++) {
355 this->neurondisplay[i] = 1;
// Compute the bounding box of the drawn network.
359 drawnymax = 400 + 30;
360 for(i = 0, drawnxmax = 0; i < nneurons; i++) {
361 if (neuronxy[i][0] > drawnxmax) {
362 drawnxmax = neuronxy[i][0];
// --- load_net_blocks fragment (the function signature is not visible in
// this chunk; presumably void Evonet::load_net_blocks(filename, mode)) ---
// Parses a .net/.phe file: ARCHITECTURE header, block table, per-neuron
// attributes, then the FREE PARAMETERS section.
// NOTE(review): fscanf return values are never checked, so a malformed file
// silently leaves fields unparsed -- worth hardening in the full file.
389 const int bufferSize = 128;
390 char cbuffer[bufferSize];
392 if ((fp = fopen(filename,
"r")) != NULL)
394 fscanf(fp,
"ARCHITECTURE\n");
395 fscanf(fp,
"nneurons %d\n", &nneurons);
396 fscanf(fp,
"nsensors %d\n", &ninputs);
397 fscanf(fp,
"nmotors %d\n", &noutputs);
// Reject files exceeding the static MAXN capacity.
399 Logger::error(
"Evonet - increase MAXN to support more than "+QString::number(
MAXN)+
" neurons" );
400 nhiddens = nneurons - (ninputs + noutputs);
401 fscanf(fp,
"nblocks %d\n", &net_nblocks);
// Read the block table; the trailing "// ..." comment of each row is
// consumed (not stored) according to the block type.
402 for (b=0; b < net_nblocks; b++)
404 fscanf(fp,
"%d %d %d %d %d", &net_block[b][0],&net_block[b][1],&net_block[b][2],&net_block[b][3],&net_block[b][4]);
406 if (net_block[b][0] == 0)
407 fscanf(fp,
" // connections block\n");
408 if (net_block[b][0] == 1)
409 fscanf(fp,
" // block to be updated\n");
410 if (net_block[b][0] == 2)
411 fscanf(fp,
" // gain block\n");
412 if (net_block[b][0] == 3)
413 fscanf(fp,
" // modulated gain block\n");
415 fscanf(fp,
"neurons bias, delta, gain, xy position, display\n");
// Per-neuron attributes plus drawn-area bookkeeping.
418 for(n=0; n < nneurons; n++)
420 fscanf(fp,
"%d %d %d %d %d %d\n", &neuronbias[n], &neurontype[n], &neurongain[n], &neuronxy[n][0], &neuronxy[n][1], &neurondisplay[n]);
421 if(drawnxmax < neuronxy[n][0])
422 drawnxmax = neuronxy[n][0];
423 if(drawnymax < neuronxy[n][1])
424 drawnymax = neuronxy[n][1];
// The declared parameter count must match what this architecture needs.
431 fscanf(fp,
"FREE PARAMETERS %d\n", &np);
432 if (nparameters != np) {
433 Logger::error(QString(
"ERROR: parameters defined are %1 while %2 contains %3 parameters").arg(nparameters).arg(filename).arg(np));
// Parameter lines: the "new" .phe format is detected heuristically -- a
// line whose second token parses as a float, or that contains "*".
440 while (fgets(cbuffer,bufferSize,fp) != NULL && i < np)
443 QString line = cbuffer;
444 QStringList lineContent = line.split(QRegExp(
"\\s+"), QString::SkipEmptyParts);
446 bool floatOnSecondPlace =
false;
447 lineContent[1].toFloat(&floatOnSecondPlace);
449 if(lineContent.contains(
"*") || floatOnSecondPlace)
450 readNewPheLine(lineContent, ph, mu);
452 readOldPheLine(lineContent, ph, mu);
465 Logger::info(
"Evonet - loaded file " + QString(filename) );
470 Logger::warning(
"Evonet - File " + QString(filename) +
" not found" );
// Parses one old-format .phe line: the first token is the parameter value.
// The handling of the mutation-rate output (mut) falls in lines missing
// from this chunk -- confirm in the full file.
475 void Evonet::readOldPheLine(QStringList line,
float* par,
float* mut)
477 *par = line[0].toFloat();
// Parses one new-format .phe line: token 0 is the parameter value, token 1
// the mutation rate. The surrounding conditionals (presumably skipping "*"
// placeholders, see the "*" test in the loader above) fall in lines missing
// from this chunk.
484 void Evonet::readNewPheLine(QStringList line,
float* par,
float* mut)
490 *par = line[0].toFloat();
496 *mut = line[1].toFloat();
// Writes the network to a .net/.phe file: ARCHITECTURE header, block table
// (with human-readable type comments), per-neuron attributes and the FREE
// PARAMETERS section. Parameters not under evolution are written as the
// "*" placeholder, formatted values otherwise.
//
// \param filename destination path
// \param mode selects .net vs .phe content (exact semantics fall in lines
//        missing from this chunk -- confirm in the full file)
503 void Evonet::save_net_blocks(
const char *filename,
int mode)
// NOTE(review): a string literal bound to a non-const char* is deprecated
// (UB if ever written through); also, re-pointing mu[h]/p[h] at
// default_string after "new char[50]" leaks those 50-byte buffers.
511 char* default_string =
"*\t\t";
512 char **p =
new char*[freeParameters()];
513 char **mu =
new char*[freeParameters()];
514 for(
int h=0; h<freeParameters(); h++) {
515 mu[h] =
new char[50];
519 mu[h] = default_string;
521 sprintf(mu[h],
"%f", muts[h]);
525 p[h] = default_string;
527 sprintf(p[h],
"%f", freep[h]);
531 if ((fp = fopen(filename,
"w")) != NULL) {
532 fprintf(fp,
"ARCHITECTURE\n");
533 fprintf(fp,
"nneurons %d\n", nneurons);
534 fprintf(fp,
"nsensors %d\n", ninputs);
535 fprintf(fp,
"nmotors %d\n", noutputs);
536 fprintf(fp,
"nblocks %d\n", net_nblocks);
// Block table rows mirror exactly what load_net_blocks expects to re-read.
537 for (b = 0; b < net_nblocks; b++) {
538 fprintf(fp,
"%d %d %d %d %d", net_block[b][0],net_block[b][1],net_block[b][2],net_block[b][3],net_block[b][4]);
540 if (net_block[b][0] == 0) {
541 fprintf(fp,
" // connections block\n");
542 }
else if (net_block[b][0] == 1) {
543 fprintf(fp,
" // block to be updated\n");
544 }
else if (net_block[b][0] == 2) {
545 fprintf(fp,
" // gain block\n");
546 }
else if (net_block[b][0] == 3) {
547 fprintf(fp,
" // modulated gain block\n");
550 fprintf(fp,
"neurons bias, delta, gain, xy position, display\n");
551 for(n = 0; n < nneurons; n++) {
552 fprintf(fp,
"%d %d %d %d %d %d\n", neuronbias[n], neurontype[n], neurongain[n], neuronxy[n][0], neuronxy[n][1], neurondisplay[n]);
// Emit one line per free parameter, in the same order computeParameters
// assigns them: gains, then biases, then weights, then timeconstants.
// NOTE(review): *p / *mu dereference the table head; the pointer advances
// presumably happen in lines missing from this chunk.
557 fprintf(fp,
"FREE PARAMETERS %d\n", nparameters);
558 for(i = 0; i < nneurons; i++) {
559 if (neurongain[i] == 1) {
560 fprintf(fp,
"%s \t %s \tgain %s\n",*p, *mu, neuronl[i]);
565 for(i=0; i<nneurons; i++) {
566 if (neuronbias[i] == 1) {
567 fprintf(fp,
"%s \t %s \tbias %s\n",*p, *mu, neuronl[i]);
572 for (b=0; b < net_nblocks; b++) {
573 if (net_block[b][0] == 0) {
574 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
575 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
576 fprintf(fp,
"%s \t %s \tweight %s from %s\n",*p, *mu, neuronl[t], neuronl[i]);
581 }
else if (net_block[b][0] == 1) {
// Timeconstants only exist for delta neurons (neurontype == 1); the
// human-readable value is normalized by wrange.
582 for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
583 if (neurontype[t] == 1) {
585 if(*p != default_string) {
587 timeC = fabs(timeC)/wrange;
590 fprintf(fp,
"%s \t %s \ttimeconstant %s (%f)\n", *p, *mu, neuronl[t], timeC);
600 Logger::info(
"Evonet - controller saved on file " + QString(filename) );
602 Logger::error(
"Evonet - unable to create the file " + QString(filename) );
610 float Evonet::logistic(
float f)
612 return((
float) (1.0 / (1.0 + exp(0.0 - f))));
// Walks the architecture to count/assign the free parameters (gains, biases,
// delta timeconstants, connection weights) and verifies that every neuron is
// updated exactly once by the update blocks. Loop bodies fall largely in
// lines missing from this chunk; only the scan structure is visible.
618 void Evonet::computeParameters()
628 for(i=0;i < nneurons;i++) {
// One parameter per neuron with a gain flag.
632 for(i=0;i < nneurons;i++) {
633 if (neurongain[i] == 1) {
// One parameter per biased neuron.
638 for(i=0;i < nneurons;i++) {
639 if (neuronbias[i] == 1) {
// One timeconstant parameter per delta neuron (neurontype == 1).
644 for(i=0;i < nneurons;i++) {
645 if (neurontype[i] == 1) {
// One weight per (target, source) pair of every connections block (type 0).
650 for (b=0; b < net_nblocks; b++) {
652 if (net_block[b][0] == 0) {
653 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
654 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
// Sanity check: each neuron must appear in exactly one update block; only
// the first offending neuron is reported (nwarnings gate).
662 for(i=0;i < nneurons;i++) {
663 if (updated[i] < 1 && nwarnings == 0) {
664 Logger::warning(
"Evonet - neuron " + QString::number(i) +
" will never be activated according to the current architecture" );
667 if (updated[i] > 1 && nwarnings == 0) {
668 Logger::warning(
"Evonet - neuron " + QString::number(i) +
" will be activated more than once according to the current architecture" );
// One simulation step: accumulates net inputs, processes the net_block table
// in order (connections, gain copies, modulated gains, neuron updates) and
// finally records the activation vector into the stored-activation buffer.
// The free-parameter pointer p advances as each parameter is consumed
// (advances fall in lines missing from this chunk).
675 void Evonet::updateNet()
// Net inputs are accumulated on the stack; sized to the static cap MAXN.
682 float netinput[
MAXN];
// Per-neuron gains: parameter magnitude rescaled from wrange to grange.
689 for(i=0;i < nneurons;i++) {
690 if (neurongain[i] == 1) {
691 gain[i] = (float) (fabs((
double) *p) / wrange) * grange;
// Biases: parameter rescaled from wrange to brange (sign preserved).
698 for(i=0;i < nneurons;i++) {
699 if (neuronbias[i] == 1) {
700 netinput[i] = ((double)*p/wrange)*brange;
708 for (b=0; b < net_nblocks; b++) {
// Type 0: weighted sum of source activations into the targets.
710 if (net_block[b][0] == 0) {
711 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
712 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
713 netinput[t] += act[i] * gain[i] * *p;
// Type 2: all neurons of the block share the first neuron's gain.
719 if (net_block[b][0] == 2) {
720 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
721 gain[t] = gain[net_block[b][1]];
// Type 3: gain modulated by the activation of the neuron in column [3].
725 if (net_block[b][0] == 3) {
726 for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
727 gain[t] = act[net_block[b][3]];
// Type 1: update the neurons' activations according to neurontype.
731 if (net_block[b][0] == 1) {
732 for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
// Input neurons: with_delta neurons leak toward the raw input with a
// timeconstant delta = |parameter| / wrange in [0, 1].
734 switch(neurontype[t]) {
739 delta = (float) (fabs((
double) *p) / wrange);
741 act[t] = (act[t] * delta) + (input[t] * (1.0f - delta));
// Lesioned neurons are clamped to their configured value.
744 if(neuronlesions > 0 && neuronlesion[t]) {
745 act[t]= (float)neuronlesionVal[t];
// Internal/output neurons: 0 = logistic, 1 = logistic with delta leak,
// 2 = binary threshold at netinput >= 0, 3 = logistic with slope 0.2.
748 switch(neurontype[t]) {
750 act[t] = logistic(netinput[t]);
754 delta = (float) (fabs((
double) *p) / wrange);
756 act[t] = (act[t] * delta) + (logistic(netinput[t]) * (1.0f - delta));
759 if (netinput[t] >= 0.0) {
766 act[t] = logistic(netinput[t]*0.2f);
770 if(neuronlesions > 0 && neuronlesion[t]) {
771 act[t]= (float)neuronlesionVal[t];
// Snapshot the activation vector into the circular stored-activation
// buffer; equal indices after advancing presumably mean the buffer
// wrapped -- confirm the handling in the full file.
779 memcpy(storedActivations[nextStoredActivation], act, nneurons *
sizeof(
float));
781 if (firstStoredActivation == nextStoredActivation) {
// Sets the value of the inp-th sensor neuron. The index is validated first;
// the error-path body and the success path (presumably storing value into
// input[inp] and returning a status code, given the int return type) fall in
// lines missing from this chunk.
792 int Evonet::setInput(
int inp,
float value)
794 if (inp>=ninputs || inp<0) {
// Returns the activation of the out-th motor neuron (outputs are stored in
// act[] after the input and hidden neurons). Lines 802-805 of the original
// are missing here and may contain a bounds check -- confirm in full file.
801 float Evonet::getOutput(
int out)
806 return act[ninputs+nhiddens+out];
809 float Evonet::getInput(
int in)
811 return this->input[in];
// Returns the activation of the in-th neuron (presumably act[in]); the body
// falls in lines missing from this chunk.
814 float Evonet::getNeuron(
int in)
// Resets the network state; iterates over all MAXN slots (the per-slot reset
// statements, and the declaration of i, fall in lines missing from this
// chunk -- presumably clearing act/input/netinput).
819 void Evonet::resetNet()
822 for (i = 0; i <
MAXN; i++) {
// Forces the activation of the nh-th hidden neuron to val (hidden neurons
// follow the inputs in act[]). A bounds check may exist in the missing
// lines 831-832 -- confirm in the full file.
830 void Evonet::injectHidden(
int nh,
float val)
833 act[this->ninputs+nh] = val;
// Returns the activation of the h-th hidden neuron, bounds-checked; the
// out-of-range branch (presumably returning a sentinel) falls in lines
// missing from this chunk.
837 float Evonet::getHidden(
int h)
839 if(h<nhiddens && h>=0) {
840 return act[this->ninputs+h];
846 int Evonet::freeParameters()
848 return this->nparameters;
// Reports whether a .phe phenotype file was loaded; the body falls in lines
// missing from this chunk.
851 bool Evonet::pheFileLoaded()
// Decodes an integer genotype into the free-parameter buffer: each gene in
// [0, geneMaxValue] is mapped linearly onto [-wrange, +wrange] (gene 0 ->
// +wrange, geneMaxValue -> -wrange). copyPheParameters applies the inverse
// mapping.
859 void Evonet::getParameters(
const int *dt)
865 for (i=0; i<freeParameters(); i++, p++) {
866 *p = wrange - ((float)dt[i]/geneMaxValue)*wrange*2;
// Exposes the per-parameter mutation rates to the caller; the loop body
// (presumably copying muts[i] into GAmut[i]) falls in lines missing from
// this chunk.
870 void Evonet::getMutations(
float* GAmut)
873 for(
int i=0; i<freeParameters(); i++) {
// Encodes the phenotype parameters back into integer genes: the inverse of
// the linear map used by getParameters (value +wrange -> gene 0, -wrange ->
// geneMaxValue). Lines 881-884 of the original are missing here (likely a
// placeholder check for parameters not under evolution).
878 void Evonet::copyPheParameters(
int* pheGene)
880 for(
int i=0; i<freeParameters(); i++)
885 pheGene[i] = (int)((wrange - phep[i])*geneMaxValue/(2*wrange));
// Builds (and presumably logs; the emission falls in missing lines) a
// one-line dump of the network state: inputs, then hidden activations, then
// output activations, each formatted with three decimals.
890 void Evonet::printIO()
// Raw sensor values.
895 for (
int in = 0; in < this->ninputs; in++) {
896 output += QString(
"%1 ").arg(this->input[in], 0,
'f', 3);
// Hidden-neuron activations.
899 for (
int hi = this->ninputs; hi < (this->nneurons - this->noutputs); hi++) {
900 output += QString(
"%1 ").arg(this->act[hi], 0,
'f', 3);
// Motor-neuron activations.
903 for (
int out = 0; out < this->noutputs; out++) {
904 output += QString(
"%1 ").arg(this->act[this->ninputs+this->nhiddens+out], 0,
'f', 3);
// Returns the nbias-th bias parameter, read from freep[] at offset
// nparambias (truncated to int); the out-of-range branch and the return
// statement fall in lines missing from this chunk.
911 int Evonet::getParamBias(
int nbias)
914 if (nbias<nparambias && nbias>-1) {
915 pb=(int) freep[nparambias+nbias];
// Accessor for the weight range wrange; the body falls in lines missing
// from this chunk.
920 float Evonet::getWrange()
// Logs the network dimensions and one formatted line per net_block row
// (type | target start - count -> source start - count | column 5), for
// debugging the architecture.
926 void Evonet::printBlocks()
928 Logger::info(
"Evonet - ninputs " + QString::number(this->ninputs));
929 Logger::info(
"Evonet - nhiddens " + QString::number(this->nhiddens));
930 Logger::info(
"Evonet - noutputs " + QString::number(this->noutputs));
931 Logger::info(
"Evonet - nneurons " + QString::number(this->nneurons));
933 for(
int i=0;i<this->net_nblocks;i++) {
934 Logger::info( QString(
"Evonet Block - %1 | %2 - %3 -> %4 - %5 | %6" )
935 .arg(net_block[i][0])
936 .arg(net_block[i][1])
937 .arg(net_block[i][2])
938 .arg(net_block[i][3])
939 .arg(net_block[i][4])
940 .arg(net_block[i][5]));
// Simple size accessors (bodies fall in lines missing from this chunk;
// presumably returning ninputs / nhiddens / noutputs / nneurons).
944 int Evonet::getNoInputs()
949 int Evonet::getNoHiddens()
954 int Evonet::getNoOutputs()
959 int Evonet::getNoNeurons()
// Sets the weight/bias/gain ranges (presumably wrange/brange/grange); the
// body falls in lines missing from this chunk.
964 void Evonet::setRanges(
double weight,
double bias,
double gain)
// --- fragments of two functions whose signatures fall in missing lines ---
// First: pops the oldest snapshot from the stored-activation buffer (empty
// presumably when the two indices coincide) and returns a pointer into
// storedActivations. Second: returns the network update counter.
973 if (firstStoredActivation == nextStoredActivation) {
977 const int ret = firstStoredActivation;
980 return storedActivations[ret];
984 return updatescounter;
990 #if defined(_MSC_VER)