evonet.cpp
1 /********************************************************************************
2  * FARSA Experiments Library *
3  * Copyright (C) 2007-2012 *
4  * Stefano Nolfi <stefano.nolfi@istc.cnr.it> *
5  * Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it> *
6  * Gianluca Massera <emmegian@yahoo.it> *
7  * Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it> *
8  * *
9  * This program is free software; you can redistribute it and/or modify *
10  * it under the terms of the GNU General Public License as published by *
11  * the Free Software Foundation; either version 2 of the License, or *
12  * (at your option) any later version. *
13  * *
14  * This program is distributed in the hope that it will be useful, *
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
17  * GNU General Public License for more details. *
18  * *
19  * You should have received a copy of the GNU General Public License *
20  * along with this program; if not, write to the Free Software *
21  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
22  ********************************************************************************/
23 
24 #include "evonet.h"
25 #include "logger.h"
26 #include "configurationhelper.h"
27 #include "evonetui.h"
28 #include <QFileInfo>
29 #include <cstring>
30 
31 #ifndef FARSA_MAC
 32  #include <malloc.h> // DEBUG: to be substituted with new malloc()
33 #endif
34 
 35 // All the stuff below is to avoid warnings on Windows about the use of unsafe
 36 // functions. This should be only a temporary workaround; the solution is to stop
 37 // using C string and file functions...
38 #if defined(_MSC_VER)
39  #pragma warning(push)
40  #pragma warning(disable:4996)
41 #endif
42 
43 namespace farsa {
44 
45 const float Evonet::DEFAULT_VALUE = -99.0f;
46 
47 Evonet::Evonet()
48 {
49  wrange = 5.0; // weight range
50  grange = 5.0; // gain range
51  brange = 5.0; // bias range
52  neuronlesions = 0;
53  freep = new float[1000];
54  phep = NULL;
55  muts = NULL;
56  geneMaxValue = 255;
57  pheloaded = false;
58  selectedp= (float **) malloc(100 * sizeof(float **));
59  for (int i = 0; i < MAXN; i++) {
60  neuronlesion[i]=false;
61  neuronlesionVal[i]=0.0;
62  }
63  net_nblocks = 0;
64 
65  nextStoredActivation = 0;
66  firstStoredActivation = 0;
67  updatescounter = 0;
68 }
69 
70 void Evonet::configure(ConfigurationParameters& params, QString prefix) {
71  int nSensors = ConfigurationHelper::getInt( params, prefix+"nSensors", 0 );
72  int nHiddens = ConfigurationHelper::getInt( params, prefix+"nHiddens", 0 );
73  int nMotors = ConfigurationHelper::getInt( params, prefix+"nMotors", 0 );
74  QString netFile = ConfigurationHelper::getString( params, prefix+"netFile", "" );
75  // --- some parameters are in conflicts
76  if ( netFile != "" && (nSensors+nHiddens+nMotors)>0 ) {
77  Logger::error( "Evonet - The information inside netFile will override any specification in all others parameters of Evonet" );
78  }
79  wrange = ConfigurationHelper::getDouble(params, prefix + "weightRange", 5.0); // the range of synaptic weights
80  grange = ConfigurationHelper::getDouble(params, prefix + "gainRange", 5.0); // the range of gains
81  brange = ConfigurationHelper::getDouble(params, prefix + "biasRange", 5.0); // the range of biases
82 
83  if ( netFile.isEmpty() ) {
84  // generate a neural network from parameters
85  ninputs = nSensors;
86  nhiddens = nHiddens;
87  noutputs = nMotors;
88  nneurons = ninputs + nhiddens + noutputs;
89  if (this->nneurons > MAXN) {
90  ConfigurationHelper::throwUserConfigError(prefix + "(nSensors + nHiddens + nMotors)", QString::number(nneurons), "Too many neurons: increase MAXN to support more than " + QString::number(MAXN) + " neurons");
91  }
92  int inputNeuronType = 0;
93  QString str = ConfigurationHelper::getString( params, prefix + "inputNeuronType", "no_delta" );
94  if ( str == QString("no_delta") ) {
95  inputNeuronType = 0;
96  } else if ( str == QString("with_delta") ) {
97  inputNeuronType = 1;
98  } else {
99  ConfigurationHelper::throwUserConfigError(prefix + "inputNeuronType", str, "Wrong value (use \"no_delta\" or \"with_delta\"");
100  }
101  int hiddenNeuronType = 0;
102  str = ConfigurationHelper::getString( params, prefix + "hiddenNeuronType", "logistic" );
103  if ( str == QString("logistic") ) {
104  hiddenNeuronType = 0;
105  } else if ( str == QString("logistic+delta") ) {
106  hiddenNeuronType = 1;
107  } else if ( str == QString("binary") ) {
108  hiddenNeuronType = 2;
109  } else if ( str == QString("logistic_0.2") ) {
110  hiddenNeuronType = 3;
111  } else {
112  ConfigurationHelper::throwUserConfigError(prefix + "hiddenNeuronType", str, "Wrong value (use \"logistic\", \"logistic+delta\", \"binary\" or \"logistic_0.2\"");
113  }
114  int outputNeuronType = 0;
115  str = ConfigurationHelper::getString( params, prefix + "outputNeuronType", "no_delta" );
116  if ( str == QString("no_delta") ) {
117  outputNeuronType = 0;
118  } else if ( str == QString("with_delta") ) {
119  outputNeuronType = 1;
120  } else {
121  ConfigurationHelper::throwUserConfigError(prefix + "outputNeuronType", str, "Wrong value (use \"no_delta\" or \"with_delta\"");
122  }
123  bool recurrentHiddens = ConfigurationHelper::getBool( params, prefix + "recurrentHiddens", false );
124  bool inputOutputConnections = ConfigurationHelper::getBool( params, prefix + "inputOutputConnections", false );
125  bool recurrentOutputs = ConfigurationHelper::getBool( params, prefix + "recurrentOutputs", false );
126  bool biasOnHidden = ConfigurationHelper::getBool( params, prefix + "biasOnHiddenNeurons", false );
127  bool biasOnOutput = ConfigurationHelper::getBool( params, prefix + "biasOnOutputNeurons", false );
128  create_net_block( inputNeuronType, hiddenNeuronType, outputNeuronType, recurrentHiddens, inputOutputConnections, recurrentOutputs, biasOnHidden, biasOnOutput );
129  } else {
130  // load the neural network from file. If the file doesn't exists, throwing an exception
131  if (load_net_blocks(netFile.toAscii().data(), 0) == 0) {
132  ConfigurationHelper::throwUserConfigError(prefix + "netFile", netFile, "Could not open the specified network configuration file");
133  }
134  }
135 
136  computeParameters();
137  // --- reallocate data on the basis of number of parameters
138  delete[] freep;
139  freep=new float[nparameters+1000]; // we allocate more space to handle network variations introduced by the user
140  for(int r=0;r<nparameters;r++)
141  freep[r]=0.0f;
142 
143  delete[] phep;
144  phep=new float[nparameters+1000]; // we allocate more space to handle network variations introduced by the user
145  for(int r=0;r<nparameters;r++)
146  phep[r]=DEFAULT_VALUE; // default value correspond to dont' care
147 
148  delete[] muts;
149  muts=new float[nparameters+1000]; // we allocate more space to handle network variations introduced by the user
150  for(int r=0;r<nparameters;r++)
151  muts[r]=DEFAULT_VALUE; // default value correspond to dont' care
152 
153  if ( !netFile.isEmpty() ) {
154  // Try to Load the file filename.phe if present in the current directory
155  QFileInfo fileNet( netFile );
156  QString filePhe = fileNet.baseName() + ".phe";
157  load_net_blocks(filePhe.toAscii().data(), 1);
158  }
159 
160  //resetting net
161  resetNet();
162 
163  printBlocks();
164 
165  // we create the labels of the hidden neurons
166  for(int i = 0; i < nhiddens; i++) {
167  sprintf(neuronl[ninputs+i], "h%d", i);
168  neuronrange[ninputs+i][0] = 0.0;
169  neuronrange[ninputs+i][1] = 1.0;
170  neurondcolor[ninputs+i] = QColor(125,125,125);
171  }
172 }
173 
174 void Evonet::save(ConfigurationParameters& params, QString prefix) {
175  params.startObjectParameters( prefix, "Evonet", this );
176  if ( netFile.isEmpty() ) {
177  params.createParameter( prefix, "nSensors", QString::number(ninputs) );
178  params.createParameter( prefix, "nHidden", QString::number(nhiddens) );
179  params.createParameter( prefix, "nMotors", QString::number(noutputs) );
180  } else {
181  params.createParameter( prefix, "netFile", netFile );
182  }
183  params.createParameter( prefix, "weightRange", QString::number(wrange) );
184  params.createParameter( prefix, "gainRange", QString::number(grange) );
185  params.createParameter( prefix, "biasRange", QString::number(brange) );
186 }
187 
188 void Evonet::describe( QString type ) {
189  Descriptor d = addTypeDescription( type, "Neural Network imported from Evorobot" );
190  d.describeInt( "nSensors" ).limits( 1, MAXN ).help( "The number of sensor neurons" );
191  d.describeInt( "nHiddens" ).limits( 1, MAXN ).help( "The number of hidden neurons" );
192  d.describeInt( "nMotors" ).limits( 1, MAXN ).help( "The number of motor neurons" );
193  d.describeString( "netFile" ).help( "The file .net where is defined the architecture to load. WARNING: when this parameter is specified any other parameters will be ignored" );
194  d.describeReal( "weightRange" ).def(5.0f).limits(1,+Infinity).help( "The synpatic weight of the neural network can only assume values in [-weightRange, +weightRange]" );
195  d.describeReal( "gainRange" ).def(5.0f).limits(0,+Infinity).help( "The gain of a neuron will can only assume values in [0, +gainRange]" );
196  d.describeReal( "biasRange" ).def(5.0f).limits(0,+Infinity).help( "The bias of a neuron will can only assume values in [-biasRange, +biasRange]" );
197  d.describeEnum( "inputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of input neurons when the network is auto generated");
198  d.describeEnum( "hiddenNeuronType" ).def("logistic").values( QStringList() << "logistic" << "logistic+delta" << "binary" << "logistic_0.2" ).help( "The type of hidden neurons when the network is auto generated");
199  d.describeEnum( "outputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of output neurons when the network is auto generated");
200  d.describeBool( "recurrentHiddens" ).def(false).help( "when true generated a network with recurrent hidden neurons");
201  d.describeBool( "inputOutputConnections" ).def(false).help( "when true generated a network with input-output connections in addition to input-hidden-output connections");
202  d.describeBool( "recurrentOutputs" ).def(false).help( "when true generated a network with recurrent output neurons");
203  d.describeBool( "biasOnHiddenNeurons" ).def(true).help( "when true generate a network with hidden neurons with a bias");
204  d.describeBool( "biasOnOutputNeurons" ).def(true).help( "when true generate a network with output neurons with a bias");
205 }
206 
208  return new EvonetUI( this );
209 }
210 
/*
 * Builds the default architecture (input -> hidden -> output, plus the
 * optional recurrent / skip connections selected by the flags) by filling
 * the net_block table, assigning per-neuron types/biases/gains and
 * computing display coordinates.
 *
 * net_block row layout (as read by updateNet() and load_net_blocks()):
 *   [0] block type: 0 = connections, 1 = update, 2 = gain, 3 = modulated gain
 *   [1] first destination (or updated) neuron
 *   [2] number of destination neurons
 *   [3] first source neuron (connection blocks)
 *   [4] number of source neurons (connection blocks)
 *   [5] extra value, always 0 here (its meaning is not shown in this file)
 */
void Evonet::create_net_block( int inputNeuronType, int hiddenNeuronType, int outputNeuronType, bool recurrentHiddens, bool inputOutputConnections, bool recurrentOutputs, bool biasOnHidden, bool biasOnOutput )
{
	int n;
	int i;
	int startx;
	int dx;

	// setting the neuron types: inputs, then hiddens, then outputs
	for(i = 0; i < this->ninputs; i++) {
		this->neurontype[i]= inputNeuronType;
		neuronbias[i] = 0; // input neurons never carry a bias
	}
	for(i = this->ninputs; i < (this->nneurons - this->noutputs); i++) {
		this->neurontype[i]= hiddenNeuronType;
		neuronbias[i] = (biasOnHidden) ? 1 : 0;
	}
	for(i = (this->nneurons - this->noutputs); i < this->nneurons; i++) {
		this->neurontype[i]= outputNeuronType;
		neuronbias[i] = (biasOnOutput) ? 1 : 0;
	}

	// gain: no neuron has an evolvable gain in the auto-generated network
	for(i=0; i < this->nneurons; i++) {
		this->neurongain[i]= 0;
	}

	this->net_nblocks = 0;
	// input update block
	this->net_block[this->net_nblocks][0] = 1;
	this->net_block[this->net_nblocks][1] = 0;
	this->net_block[this->net_nblocks][2] = this->ninputs;
	this->net_block[this->net_nblocks][3] = 0;
	this->net_block[this->net_nblocks][4] = 0;
	this->net_block[this->net_nblocks][5] = 0;
	this->net_nblocks++;

	// input-hidden connections
	if (this->nhiddens > 0) {
		this->net_block[this->net_nblocks][0] = 0;
		this->net_block[this->net_nblocks][1] = this->ninputs;
		this->net_block[this->net_nblocks][2] = this->nhiddens;
		this->net_block[this->net_nblocks][3] = 0;
		this->net_block[this->net_nblocks][4] = this->ninputs;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// hidden-hidden connections (recurrent hidden layer)
	if (recurrentHiddens) {
		this->net_block[this->net_nblocks][0] = 0;
		this->net_block[this->net_nblocks][1] = this->ninputs;
		this->net_block[this->net_nblocks][2] = this->nhiddens;
		this->net_block[this->net_nblocks][3] = this->ninputs;
		this->net_block[this->net_nblocks][4] = this->nhiddens;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// hidden update block
	if (this->nhiddens > 0) {
		this->net_block[this->net_nblocks][0] = 1;
		this->net_block[this->net_nblocks][1] = this->ninputs;
		this->net_block[this->net_nblocks][2] = this->nhiddens;
		this->net_block[this->net_nblocks][3] = 0;
		this->net_block[this->net_nblocks][4] = 0;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// input-output connections (always present when there is no hidden layer)
	if (this->nhiddens == 0 || inputOutputConnections) {
		this->net_block[this->net_nblocks][0] = 0;
		this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
		this->net_block[this->net_nblocks][2] = this->noutputs;
		this->net_block[this->net_nblocks][3] = 0;
		this->net_block[this->net_nblocks][4] = this->ninputs;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// hidden-output connections
	if (this->nhiddens > 0) {
		this->net_block[net_nblocks][0] = 0;
		this->net_block[net_nblocks][1] = this->ninputs + this->nhiddens;
		this->net_block[net_nblocks][2] = this->noutputs;
		this->net_block[net_nblocks][3] = this->ninputs;
		this->net_block[net_nblocks][4] = this->nhiddens;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// output-output connections (recurrent output layer)
	if (recurrentOutputs) {
		this->net_block[this->net_nblocks][0] = 0;
		this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
		this->net_block[this->net_nblocks][2] = this->noutputs;
		this->net_block[this->net_nblocks][3] = this->ninputs + this->nhiddens;
		this->net_block[this->net_nblocks][4] = this->noutputs;
		this->net_block[this->net_nblocks][5] = 0;
		this->net_nblocks++;
	}

	// output update block
	this->net_block[this->net_nblocks][0] = 1;
	this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
	this->net_block[this->net_nblocks][2] = this->noutputs;
	this->net_block[this->net_nblocks][3] = 0;
	this->net_block[this->net_nblocks][4] = 0;
	this->net_block[this->net_nblocks][5] = 0;
	this->net_nblocks++;

	// cartesian xy coordinate for sensory neurons for display (y=400);
	// the wider of the two layers (inputs vs outputs) starts at x=50 and the
	// narrower one is centered relative to it
	n = 0;
	dx = 30;//25
	if (this->ninputs > this->noutputs) {
		startx = 50;
	} else {
		startx = ((this->noutputs - this->ninputs) / 2) * dx + 50;
	}
	for(i = 0; i < this->ninputs; i++, n++) {
		this->neuronxy[n][0] = (i * dx) + startx;
		this->neuronxy[n][1] = 400;
	}

	// cartesian xy coordinate for internal neurons for display (y=225)
	startx = this->ninputs * dx;
	for(i=0; i < (this->nneurons - (this->ninputs + this->noutputs)); i++, n++) {
		this->neuronxy[n][0] = startx + (i * dx);
		this->neuronxy[n][1] = 225;
	}

	// cartesian xy coordinate for motor neurons for display (y=50)
	if (this->ninputs > this->noutputs) {
		startx = ((this->ninputs - this->noutputs) / 2) * dx + 50;
	} else {
		startx = 50;
	}
	for(i=0; i < this->noutputs; i++, n++) {
		this->neuronxy[n][0] = startx + (i * dx);
		this->neuronxy[n][1] = 50;
	}

	// set neurons whose activation should be displayed
	for(i=0; i < this->nneurons; i++) {
		this->neurondisplay[i] = 1;
	}

	// calculate the height and width necessary to display all created neurons (drawnymax, drawnxmax)
	drawnymax = 400 + 30;
	for(i = 0, drawnxmax = 0; i < nneurons; i++) {
		if (neuronxy[i][0] > drawnxmax) {
			drawnxmax = neuronxy[i][0];
		}
	}
	drawnxmax += 60;

	// compute the number of parameters
	computeParameters();

	//i = this->ninputs;
	//if (this->ninputs > i)
	//	i = this->noutputs;
	//if ((this->nneurons - this->noutputs) > i)
	//	i = (this->nneurons - this->noutputs);
	//drawnxmax = (i * dx) + dx + 30;
}
377 
/*
 * Loads a network description from a text file.
 *
 * The file starts with an ARCHITECTURE section (neuron counts, the
 * net_block table and per-neuron attributes). When mode == 1 the function
 * additionally reads the FREE PARAMETERS section, filling phep (phenotype
 * values), muts (mutation rates) and freep (current parameters); both the
 * old one-value-per-line format and the new "<value> <mutation>" format
 * (where '*' means don't care) are accepted.
 *
 * Returns 1 on success, 0 if the file could not be opened.
 */
int Evonet::load_net_blocks(const char *filename, int mode)
{

	FILE *fp;
	int b;
	int n;
	int i;
	float *ph;
	float *mu;
	float *p;
	int np;
	const int bufferSize = 128;
	char cbuffer[bufferSize];

	if ((fp = fopen(filename,"r")) != NULL)
	{
		// the fscanf calls with literal text skip the fixed section headers
		fscanf(fp,"ARCHITECTURE\n");
		fscanf(fp,"nneurons %d\n", &nneurons);
		fscanf(fp,"nsensors %d\n", &ninputs);
		fscanf(fp,"nmotors %d\n", &noutputs);
		if (nneurons > MAXN)
			Logger::error( "Evonet - increase MAXN to support more than "+QString::number(MAXN)+" neurons" );
		nhiddens = nneurons - (ninputs + noutputs);
		fscanf(fp,"nblocks %d\n", &net_nblocks);
		for (b=0; b < net_nblocks; b++)
		{
			fscanf(fp,"%d %d %d %d %d", &net_block[b][0],&net_block[b][1],&net_block[b][2],&net_block[b][3],&net_block[b][4]);
			net_block[b][5] = 0; // stefano: this value should also be loaded from the file
			// consume the trailing comment that save_net_blocks() emits for
			// each block type
			if (net_block[b][0] == 0)
				fscanf(fp," // connections block\n");
			if (net_block[b][0] == 1)
				fscanf(fp," // block to be updated\n");
			if (net_block[b][0] == 2)
				fscanf(fp," // gain block\n");
			if (net_block[b][0] == 3)
				fscanf(fp," // modulated gain block\n");
		}
		fscanf(fp,"neurons bias, delta, gain, xy position, display\n");
		// track the display bounding box while reading neuron attributes
		drawnxmax = 0;
		drawnymax = 0;
		for(n=0; n < nneurons; n++)
		{
			fscanf(fp,"%d %d %d %d %d %d\n", &neuronbias[n], &neurontype[n], &neurongain[n], &neuronxy[n][0], &neuronxy[n][1], &neurondisplay[n]);
			if(drawnxmax < neuronxy[n][0])
				drawnxmax = neuronxy[n][0];
			if(drawnymax < neuronxy[n][1])
				drawnymax = neuronxy[n][1];
		}
		drawnxmax += 30;
		drawnymax += 30;

		if (mode == 1)
		{
			fscanf(fp,"FREE PARAMETERS %d\n", &np);
			if (nparameters != np) {
				Logger::error(QString("ERROR: parameters defined are %1 while %2 contains %3 parameters").arg(nparameters).arg(filename).arg(np));
			}
			i = 0;
			ph = phep;
			mu = muts;
			p = freep;

			while (fgets(cbuffer,bufferSize,fp) != NULL && i < np)
			{
				//read values from line
				QString line = cbuffer;
				QStringList lineContent = line.split(QRegExp("\\s+"), QString::SkipEmptyParts);

				// heuristic to tell the two formats apart: the new format
				// has a number (or '*') in the second column
				// NOTE(review): lineContent[1] is accessed without checking
				// the list size — a line with a single token would assert/
				// crash here; confirm every parameter line has >= 2 tokens
				bool floatOnSecondPlace = false;
				lineContent[1].toFloat(&floatOnSecondPlace);

				if(lineContent.contains("*") || floatOnSecondPlace)
					readNewPheLine(lineContent, ph, mu);
				else
					readOldPheLine(lineContent, ph, mu);

				// the loaded phenotype value also becomes the current parameter
				*p = *ph;

				i++;
				mu++;
				ph++;
				p++;
			}
			pheloaded = true;
		}
		fclose(fp);

		Logger::info( "Evonet - loaded file " + QString(filename) );
		return(1);
	}
	else
	{
		Logger::warning( "Evonet - File " + QString(filename) + " not found" );
		return(0);
	}
}
474 
475 void Evonet::readOldPheLine(QStringList line, float* par, float* mut)
476 {
477  *par = line[0].toFloat();
478 
479  if(*par != DEFAULT_VALUE) { //no mutations
480  *mut = 0;
481  }
482 }
483 
484 void Evonet::readNewPheLine(QStringList line, float* par, float* mut)
485 {
486  if(line[0] == "*") {
487  *par = DEFAULT_VALUE; //start at random
488  } else {
489  //error handling
490  *par = line[0].toFloat();
491  }
492 
493  if(line[1] == "*") {
494  *mut = DEFAULT_VALUE;
495  } else {
496  *mut = line[1].toFloat();
497  }
498 }
499 
500 /*
501  * It save the architecture and also the parameters (when mode =1)
502  */
503 void Evonet::save_net_blocks(const char *filename, int mode)
504 {
505  FILE *fp;
506  int b;
507  int n;
508  int i;
509  int t;
510 
511  char* default_string = "*\t\t";
512  char **p = new char*[freeParameters()];
513  char **mu = new char*[freeParameters()];
514  for(int h=0; h<freeParameters(); h++) {
515  mu[h] = new char[50];
516  p[h] = new char[50];
517 
518  if(muts[h] == DEFAULT_VALUE) {
519  mu[h] = default_string;
520  } else {
521  sprintf(mu[h], "%f", muts[h]);
522  }
523 
524  if(freep[h] == DEFAULT_VALUE) {
525  p[h] = default_string;
526  } else {
527  sprintf(p[h], "%f", freep[h]);
528  }
529  }
530 
531  if ((fp = fopen(filename,"w")) != NULL) {
532  fprintf(fp,"ARCHITECTURE\n");
533  fprintf(fp,"nneurons %d\n", nneurons);
534  fprintf(fp,"nsensors %d\n", ninputs);
535  fprintf(fp,"nmotors %d\n", noutputs);
536  fprintf(fp,"nblocks %d\n", net_nblocks);
537  for (b = 0; b < net_nblocks; b++) {
538  fprintf(fp,"%d %d %d %d %d", net_block[b][0],net_block[b][1],net_block[b][2],net_block[b][3],net_block[b][4]);
539  // stefano also the sixt value has to be saved
540  if (net_block[b][0] == 0) {
541  fprintf(fp," // connections block\n");
542  } else if (net_block[b][0] == 1) {
543  fprintf(fp," // block to be updated\n");
544  } else if (net_block[b][0] == 2) {
545  fprintf(fp," // gain block\n");
546  } else if (net_block[b][0] == 3) {
547  fprintf(fp," // modulated gain block\n");
548  }
549  }
550  fprintf(fp,"neurons bias, delta, gain, xy position, display\n");
551  for(n = 0; n < nneurons; n++) {
552  fprintf(fp,"%d %d %d %d %d %d\n", neuronbias[n], neurontype[n], neurongain[n], neuronxy[n][0], neuronxy[n][1], neurondisplay[n]);
553  }
554 
555  computeParameters();
556  if (mode == 1) {
557  fprintf(fp,"FREE PARAMETERS %d\n", nparameters);
558  for(i = 0; i < nneurons; i++) {
559  if (neurongain[i] == 1) {
560  fprintf(fp,"%s \t %s \tgain %s\n",*p, *mu, neuronl[i]);
561  p++;
562  mu++;
563  }
564  }
565  for(i=0; i<nneurons; i++) {
566  if (neuronbias[i] == 1) {
567  fprintf(fp,"%s \t %s \tbias %s\n",*p, *mu, neuronl[i]);
568  p++;
569  mu++;
570  }
571  }
572  for (b=0; b < net_nblocks; b++) {
573  if (net_block[b][0] == 0) {
574  for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
575  for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
576  fprintf(fp,"%s \t %s \tweight %s from %s\n",*p, *mu, neuronl[t], neuronl[i]);
577  p++;
578  mu++;
579  }
580  }
581  } else if (net_block[b][0] == 1) {
582  for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
583  if (neurontype[t] == 1) {
584  float timeC = 0;
585  if(*p != default_string) {
586  timeC = atof(*p);
587  timeC = fabs(timeC)/wrange; //(timeC + wrange)/(wrange*2);
588  }
589 
590  fprintf(fp,"%s \t %s \ttimeconstant %s (%f)\n", *p, *mu, neuronl[t], timeC);
591  p++;
592  mu++;
593  }
594  }
595  }
596  }
597  }
598  fprintf(fp,"END\n");
599 
600  Logger::info( "Evonet - controller saved on file " + QString(filename) );
601  } else {
602  Logger::error( "Evonet - unable to create the file " + QString(filename) );
603  }
604  fclose(fp);
605 }
606 
607 /*
608  * standard logistic
609  */
610 float Evonet::logistic(float f)
611 {
612  return((float) (1.0 / (1.0 + exp(0.0 - f))));
613 }
614 
615 /*
616  * compute the number of free parameters
617  */
618 void Evonet::computeParameters()
619 {
620  int i;
621  int t;
622  int b;
623  int updated[MAXN];
624  int ng;
625  int nwarnings;
626 
627  ng = 0;
628  for(i=0;i < nneurons;i++) {
629  updated[i] = 0;
630  }
631  // gain
632  for(i=0;i < nneurons;i++) {
633  if (neurongain[i] == 1) {
634  ng++;
635  }
636  }
637  // biases
638  for(i=0;i < nneurons;i++) {
639  if (neuronbias[i] == 1) {
640  ng++;
641  }
642  }
643  // timeconstants
644  for(i=0;i < nneurons;i++) {
645  if (neurontype[i] == 1) {
646  ng++;
647  }
648  }
649  // blocks
650  for (b=0; b < net_nblocks; b++) {
651  // connection block
652  if (net_block[b][0] == 0) {
653  for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
654  for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
655  ng++;
656  }
657  }
658  }
659  }
660 
661  nwarnings = 0;
662  for(i=0;i < nneurons;i++) {
663  if (updated[i] < 1 && nwarnings == 0) {
664  Logger::warning( "Evonet - neuron " + QString::number(i) + " will never be activated according to the current architecture" );
665  nwarnings++;
666  }
667  if (updated[i] > 1 && nwarnings == 0) {
668  Logger::warning( "Evonet - neuron " + QString::number(i) + " will be activated more than once according to the current architecture" );
669  nwarnings++;
670  }
671  }
672  nparameters=ng; // number of parameters
673 }
674 
675 void Evonet::updateNet()
676 {
677  int i;
678  int t;
679  int b;
680  float *p;
681  float delta;
682  float netinput[MAXN];
683  float gain[MAXN];
684 
685  p = freep;
686  //nl = neuronlesion;
687 
688  // gain
689  for(i=0;i < nneurons;i++) {
690  if (neurongain[i] == 1) {
691  gain[i] = (float) (fabs((double) *p) / wrange) * grange;
692  p++;
693  } else {
694  gain[i] = 1.0f;
695  }
696  }
697  // biases
698  for(i=0;i < nneurons;i++) {
699  if (neuronbias[i] == 1) {
700  netinput[i] = ((double)*p/wrange)*brange;
701  p++;
702  } else {
703  netinput[i] = 0.0f;
704  }
705  }
706 
707  // blocks
708  for (b=0; b < net_nblocks; b++) {
709  // connection block
710  if (net_block[b][0] == 0) {
711  for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
712  for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
713  netinput[t] += act[i] * gain[i] * *p;
714  p++;
715  }
716  }
717  }
718  // gain block (gain of neuron a-b set equal to gain of neuron a)
719  if (net_block[b][0] == 2) {
720  for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
721  gain[t] = gain[net_block[b][1]];
722  }
723  }
724  // gain block (gain of neuron a-b set equal to act[c])
725  if (net_block[b][0] == 3) {
726  for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
727  gain[t] = act[net_block[b][3]];
728  }
729  }
730  // update block
731  if (net_block[b][0] == 1) {
732  for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
733  if (t < ninputs) {
734  switch(neurontype[t]) {
735  case 0: // simple rely units
736  act[t] = input[t];
737  break;
738  case 1: // delta neurons
739  delta = (float) (fabs((double) *p) / wrange);
740  p++;
741  act[t] = (act[t] * delta) + (input[t] * (1.0f - delta));
742  break;
743  }
744  if(neuronlesions > 0 && neuronlesion[t]) {
745  act[t]= (float)neuronlesionVal[t];
746  }
747  } else {
748  switch(neurontype[t]) {
749  case 0: // simple logistic
750  act[t] = logistic(netinput[t]);
751  delta = 0.0;
752  break;
753  case 1: // delta neurons
754  delta = (float) (fabs((double) *p) / wrange);
755  p++;
756  act[t] = (act[t] * delta) + (logistic(netinput[t]) * (1.0f - delta));
757  break;
758  case 2: // binary neurons
759  if (netinput[t] >= 0.0) {
760  act[t] = 1.0;
761  } else {
762  act[t] = 0.0;
763  }
764  break;
765  case 3: // logistic2 neurons
766  act[t] = logistic(netinput[t]*0.2f);
767  delta = 0.0;
768  break;
769  }
770  if(neuronlesions > 0 && neuronlesion[t]) {
771  act[t]= (float)neuronlesionVal[t];
772  }
773  }
774  }
775  }
776  }
777 
778  // Storing the current activations
779  memcpy(storedActivations[nextStoredActivation], act, nneurons * sizeof(float));
780  nextStoredActivation = (nextStoredActivation + 1) % MAXSTOREDACTIVATIONS;
781  if (firstStoredActivation == nextStoredActivation) {
782  // We have filled the circular buffer, discarding the oldest activation
783  firstStoredActivation += (firstStoredActivation + 1) % MAXSTOREDACTIVATIONS;
784  }
785 
786  // increment the counter
787  updatescounter++;
788 
789  emit evonetUpdated();
790 }
791 
792 int Evonet::setInput(int inp, float value)
793 {
794  if (inp>=ninputs || inp<0) {
795  return -1;// exceding sensor number
796  }
797  input[inp]=value;
798  return 0;
799 }
800 
801 float Evonet::getOutput(int out)
802 {
803  if(out>=noutputs) {
804  return -1; //exceeding out numbers
805  }
806  return act[ninputs+nhiddens+out];
807 }
808 
809 float Evonet::getInput(int in)
810 {
811  return this->input[in];
812 }
813 
814 float Evonet::getNeuron(int in)
815 {
816  return act[in];
817 }
818 
819 void Evonet::resetNet()
820 {
821  int i;
822  for (i = 0; i < MAXN; i++) {
823  act[i]=0.0;
824  netinput[i]=0.0;
825  input[i]=0.0;
826  }
827  updatescounter = 0;
828 }
829 
830 void Evonet::injectHidden(int nh, float val)
831 {
832  if(nh<nhiddens) {
833  act[this->ninputs+nh] = val;
834  }
835 }
836 
837 float Evonet::getHidden(int h)
838 {
839  if(h<nhiddens && h>=0) {
840  return act[this->ninputs+h];
841  } else {
842  return -999;
843  }
844 }
845 
846 int Evonet::freeParameters()
847 {
848  return this->nparameters;
849 }
850 
851 bool Evonet::pheFileLoaded()
852 {
853  return pheloaded;
854 }
855 
856 /*
857  * Copy parameters from genotype
858  */
859 void Evonet::getParameters(const int *dt)
860 {
861  int i;
862  float *p;
863 
864  p = freep;
865  for (i=0; i<freeParameters(); i++, p++) {
866  *p = wrange - ((float)dt[i]/geneMaxValue)*wrange*2;
867  }
868 }
869 
870 void Evonet::getMutations(float* GAmut)
871 {
872  //copy mutation vector
873  for(int i=0; i<freeParameters(); i++) {
874  GAmut[i] = muts[i];
875  }
876 }
877 
878 void Evonet::copyPheParameters(int* pheGene)
879 {
880  for(int i=0; i<freeParameters(); i++)
881  {
882  if(phep[i] == DEFAULT_VALUE) {
883  pheGene[i] = DEFAULT_VALUE;
884  } else {
885  pheGene[i] = (int)((wrange - phep[i])*geneMaxValue/(2*wrange));
886  }
887  }
888 }
889 
890 void Evonet::printIO()
891 {
892  QString output;
893 
894  output = "In: ";
895  for (int in = 0; in < this->ninputs; in++) {
896  output += QString("%1 ").arg(this->input[in], 0, 'f', 3);
897  }
898  output += "Hid: ";
899  for (int hi = this->ninputs; hi < (this->nneurons - this->noutputs); hi++) {
900  output += QString("%1 ").arg(this->act[hi], 0, 'f', 3);
901  }
902  output += "Out: ";
903  for (int out = 0; out < this->noutputs; out++) {
904  output += QString("%1 ").arg(this->act[this->ninputs+this->nhiddens+out], 0, 'f', 3);
905  }
906 
907  Logger::info(output);
908 
909 }
910 
911 int Evonet::getParamBias(int nbias)
912 {
913  int pb=-999; // if remain -999 it means nbias is out of range
914  if (nbias<nparambias && nbias>-1) {
915  pb=(int) freep[nparambias+nbias];
916  }
917  return pb;
918 }
919 
920 float Evonet::getWrange()
921 {
922  return wrange;
923 }
924 
925 
926 void Evonet::printBlocks()
927 {
928  Logger::info("Evonet - ninputs " + QString::number(this->ninputs));
929  Logger::info("Evonet - nhiddens " + QString::number(this->nhiddens));
930  Logger::info("Evonet - noutputs " + QString::number(this->noutputs));
931  Logger::info("Evonet - nneurons " + QString::number(this->nneurons));
932 
933  for(int i=0;i<this->net_nblocks;i++) {
934  Logger::info( QString( "Evonet Block - %1 | %2 - %3 -> %4 - %5 | %6" )
935  .arg(net_block[i][0])
936  .arg(net_block[i][1])
937  .arg(net_block[i][2])
938  .arg(net_block[i][3])
939  .arg(net_block[i][4])
940  .arg(net_block[i][5]));
941  }
942 }
943 
944 int Evonet::getNoInputs()
945 {
946  return ninputs;
947 }
948 
949 int Evonet::getNoHiddens()
950 {
951  return nhiddens;
952 }
953 
954 int Evonet::getNoOutputs()
955 {
956  return noutputs;
957 }
958 
959 int Evonet::getNoNeurons()
960 {
961  return nneurons;
962 }
963 
964 void Evonet::setRanges(double weight, double bias, double gain)
965 {
966  wrange=weight;
967  brange=bias;
968  grange=gain;
969 }
970 
972 {
973  if (firstStoredActivation == nextStoredActivation) {
974  return NULL;
975  }
976 
977  const int ret = firstStoredActivation;
978  firstStoredActivation = (firstStoredActivation + 1) % MAXSTOREDACTIVATIONS;
979 
980  return storedActivations[ret];
981 }
982 
984  return updatescounter;
985 }
986 
987 } // end namespace farsa
988 
989 // All the suff below is to restore the warning state on Windows
990 #if defined(_MSC_VER)
991  #pragma warning(pop)
992 #endif