experiments/src/evonet.cpp

00001 /********************************************************************************
00002  *  FARSA Experiments Library                                                   *
00003  *  Copyright (C) 2007-2012                                                     *
00004  *  Stefano Nolfi <stefano.nolfi@istc.cnr.it>                                   *
00005  *  Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it>                           *
00006  *  Gianluca Massera <emmegian@yahoo.it>                                        *
00007  *  Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it>                         *
00008  *                                                                              *
00009  *  This program is free software; you can redistribute it and/or modify        *
00010  *  it under the terms of the GNU General Public License as published by        *
00011  *  the Free Software Foundation; either version 2 of the License, or           *
00012  *  (at your option) any later version.                                         *
00013  *                                                                              *
00014  *  This program is distributed in the hope that it will be useful,             *
00015  *  but WITHOUT ANY WARRANTY; without even the implied warranty of              *
00016  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               *
00017  *  GNU General Public License for more details.                                *
00018  *                                                                              *
00019  *  You should have received a copy of the GNU General Public License           *
00020  *  along with this program; if not, write to the Free Software                 *
00021  *  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA  *
00022  ********************************************************************************/
00023 
00024 #include "evonet.h"
00025 #include "logger.h"
00026 #include "configurationhelper.h"
00027 #include <QFileInfo>
00028 #include <cstring>
00029 
00030 #ifndef FARSA_MAC
00031     #include <malloc.h>  // DEBUG: to be substituted with new instead of malloc()
00032 #endif
00033 
00034 // All the stuff below is to avoid warnings on Windows about the use of unsafe
00035 // functions. This should be only a temporary workaround; the solution is to stop
00036 // using C string and file functions...
00037 #if defined(_MSC_VER)
00038     #pragma warning(push)
00039     #pragma warning(disable:4996)
00040 #endif
00041 
namespace farsa {

// Sentinel meaning "don't care / unset" for free parameters and mutation
// rates; written as "*" in .phe files (see readNewPheLine/save_net_blocks).
const float Evonet::DEFAULT_VALUE = -99.0f;
00045 
00046 Evonet::Evonet()
00047 {
00048     wrange = 5.0; // weight range
00049     grange = 5.0; // gain range
00050     brange = 5.0; // bias range
00051     neuronlesions = 0;
00052     freep = new float[10];
00053     phep = NULL;
00054     muts = NULL;
00055     geneMaxValue = 255;
00056     pheloaded = false;
00057     selectedp= (float **) malloc(100 * sizeof(float **));
00058     for (int i = 0; i < MAXN; i++) {
00059         neuronlesion[i]=false;
00060         neuronlesionVal[i]=0.0;
00061     }
00062     net_nblocks = 0;
00063 
00064     nextStoredActivation = 0;
00065     firstStoredActivation = 0;
00066 }
00067 
/*
 * Configures the network from configuration parameters.
 *
 * Two mutually exclusive modes:
 *  - netFile empty: the architecture is generated from nSensors/nHiddens/
 *    nMotors plus the neuron-type and connectivity flags;
 *  - netFile set: the whole architecture is loaded from that file and any
 *    other architecture parameter is ignored (an error is logged if both
 *    are supplied).
 * In both cases the parameter vectors (freep/phep/muts) are reallocated to
 * the computed number of free parameters, a companion .phe file is loaded
 * if one exists next to netFile, and the network is reset.
 */
void Evonet::configure(ConfigurationParameters& params, QString prefix) {
    int nSensors = ConfigurationHelper::getInt( params, prefix+"nSensors", 0 );
    int nHiddens = ConfigurationHelper::getInt( params, prefix+"nHiddens", 0 );
    int nMotors = ConfigurationHelper::getInt( params, prefix+"nMotors", 0 );
    // NOTE(review): this local netFile shadows the member of the same name
    // used by save() - confirm the member is assigned elsewhere, otherwise
    // save() always takes its "netFile is empty" branch
    QString netFile = ConfigurationHelper::getString( params, prefix+"netFile", "" );
    // --- some parameters are in conflicts
    if ( netFile != "" && (nSensors+nHiddens+nMotors)>0 ) {
        Logger::error( "Evonet - The information inside netFile will override any specification in all others parameters of Evonet" );
    }
    wrange = ConfigurationHelper::getDouble(params, prefix + "weightRange", 5.0); // the range of synaptic weights
    grange = ConfigurationHelper::getDouble(params, prefix + "gainRange", 5.0); // the range of gains
    brange = ConfigurationHelper::getDouble(params, prefix + "biasRange", 5.0); // the range of biases

    if ( netFile.isEmpty() ) {
        // generate a neural network from parameters
        ninputs  = nSensors;
        nhiddens = nHiddens;
        noutputs = nMotors;
        nneurons = ninputs + nhiddens + noutputs;
        if (this->nneurons > MAXN) {
            ConfigurationHelper::throwUserConfigError(prefix + "(nSensors + nHiddens + nMotors)", QString::number(nneurons), "Too many neurons: increase MAXN to support more than " + QString::number(MAXN) + " neurons");
        }
        // map the textual neuron-type options onto the integer codes consumed
        // by updateNet(): 0 = plain, 1 = delta (leaky), 2 = binary, 3 = logistic_0.2
        int inputNeuronType = 0;
        QString str = ConfigurationHelper::getString( params, prefix + "inputNeuronType", "no_delta" );
        if ( str == QString("no_delta") ) {
            inputNeuronType = 0;
        } else if ( str == QString("with_delta") ) {
            inputNeuronType = 1;
        } else {
            ConfigurationHelper::throwUserConfigError(prefix + "inputNeuronType", str, "Wrong value (use \"no_delta\" or \"with_delta\"");
        }
        int hiddenNeuronType = 0;
        str = ConfigurationHelper::getString( params, prefix + "hiddenNeuronType", "logistic" );
        if ( str == QString("logistic") ) {
            hiddenNeuronType = 0;
        } else if ( str == QString("logistic+delta") ) {
            hiddenNeuronType = 1;
        } else if ( str == QString("binary") ) {
            hiddenNeuronType = 2;
        } else if ( str == QString("logistic_0.2") ) {
            hiddenNeuronType = 3;
        } else {
            ConfigurationHelper::throwUserConfigError(prefix + "hiddenNeuronType", str, "Wrong value (use \"logistic\", \"logistic+delta\", \"binary\" or \"logistic_0.2\"");
        }
        int outputNeuronType = 0;
        str = ConfigurationHelper::getString( params, prefix + "outputNeuronType", "no_delta" );
        if ( str == QString("no_delta") ) {
            outputNeuronType = 0;
        } else if ( str == QString("with_delta") ) {
            outputNeuronType = 1;
        } else {
            ConfigurationHelper::throwUserConfigError(prefix + "outputNeuronType", str, "Wrong value (use \"no_delta\" or \"with_delta\"");
        }
        // connectivity flags for the generated architecture
        bool recurrentHiddens = ConfigurationHelper::getBool( params, prefix + "recurrentHiddens", false );
        bool inputOutputConnections = ConfigurationHelper::getBool( params, prefix + "inputOutputConnections", false );
        bool recurrentOutputs = ConfigurationHelper::getBool( params, prefix + "recurrentOutputs", false );
        bool biasOnHidden = ConfigurationHelper::getBool( params, prefix + "biasOnHiddenNeurons", false );
        bool biasOnOutput = ConfigurationHelper::getBool( params, prefix + "biasOnOutputNeurons", false );
        create_net_block( inputNeuronType, hiddenNeuronType, outputNeuronType, recurrentHiddens, inputOutputConnections, recurrentOutputs, biasOnHidden, biasOnOutput );
    } else {
        // load the neural network from file. If the file doesn't exists, throwing an exception
        if (load_net_blocks(netFile.toAscii().data(), 0) == 0) {
            ConfigurationHelper::throwUserConfigError(prefix + "netFile", netFile, "Could not open the specified network configuration file");
        }
    }

    computeParameters();
    // --- reallocate data on the basis of number of parameters
    delete[] freep;
    freep=new float[nparameters];
    for(int r=0;r<nparameters;r++)
        freep[r]=0.0f;

    delete[] phep;
    phep=new float[nparameters];
    for(int r=0;r<nparameters;r++)
        phep[r]=DEFAULT_VALUE; // default value correspond to dont' care

    delete[] muts;
    muts=new float[nparameters];
    for(int r=0;r<nparameters;r++)
        muts[r]=DEFAULT_VALUE; // default value correspond to dont' care

    if ( !netFile.isEmpty() ) {
        // Try to Load the file filename.phe if present in the current directory
        QFileInfo fileNet( netFile );
        QString filePhe = fileNet.baseName() + ".phe";
        load_net_blocks(filePhe.toAscii().data(), 1);
    }

    //resetting net
    resetNet();

    printBlocks();

    // we create the labels of the hidden neurons
    for(int i = 0; i < nhiddens; i++) {
        sprintf(neuronl[ninputs+i], "h%d", i);
        neuronrange[ninputs+i][0] = 0.0;
        neuronrange[ninputs+i][1] = 1.0;
        neurondcolor[ninputs+i] = QColor(125,125,125);
    }
}
00171 
00172 void Evonet::save(ConfigurationParameters& params, QString prefix) {
00173     params.startObjectParameters( prefix, "Evonet", this );
00174     if ( netFile.isEmpty() ) {
00175         params.createParameter( prefix, "nSensors", QString::number(ninputs) );
00176         params.createParameter( prefix, "nHidden", QString::number(nhiddens) );
00177         params.createParameter( prefix, "nMotors", QString::number(noutputs) );
00178     } else {
00179         params.createParameter( prefix, "netFile", netFile );
00180     }
00181     params.createParameter( prefix, "weightRange", QString::number(wrange) );
00182     params.createParameter( prefix, "gainRange", QString::number(grange) );
00183     params.createParameter( prefix, "biasRange", QString::number(brange) );
00184 }
00185 
00186 void Evonet::describe( QString type ) {
00187     Descriptor d = addTypeDescription( type, "Neural Network imported from Evorobot" );
00188     d.describeInt( "nSensors" ).limits( 1, MAXN ).help( "The number of sensor neurons" );
00189     d.describeInt( "nHiddens" ).limits( 1, MAXN ).help( "The number of hidden neurons" );
00190     d.describeInt( "nMotors" ).limits( 1, MAXN ).help( "The number of motor neurons" );
00191     d.describeString( "netFile" ).help( "The file .net where is defined the architecture to load. WARNING: when this parameter is specified any other parameters will be ignored" );
00192     d.describeReal( "weightRange" ).def(5.0f).limits(1,+Infinity).help( "The synpatic weight of the neural network can only assume values in [-weightRange, +weightRange]" );
00193     d.describeReal( "gainRange" ).def(5.0f).limits(0,+Infinity).help( "The gain of a neuron will can only assume values in [0, +gainRange]" );
00194     d.describeReal( "biasRange" ).def(5.0f).limits(0,+Infinity).help( "The bias of a neuron will can only assume values in [-biasRange, +biasRange]" );
00195     d.describeEnum( "inputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of input neurons when the network is auto generated");
00196     d.describeEnum( "hiddenNeuronType" ).def("logistic").values( QStringList() << "logistic" << "logistic+delta" << "binary" << "logistic_0.2" ).help( "The type of hidden neurons when the network is auto generated");
00197     d.describeEnum( "outputNeuronType" ).def("no_delta").values( QStringList() << "no_delta" << "with_delta" ).help( "The type of output neurons when the network is auto generated");
00198     d.describeBool( "recurrentHiddens" ).def(false).help( "when true generated a network with recurrent hidden neurons");
00199     d.describeBool( "inputOutputConnections" ).def(false).help( "when true generated a network with input-output connections in addition to input-hidden-output connections");
00200     d.describeBool( "recurrentOutputs" ).def(false).help( "when true generated a network with recurrent output neurons");
00201     d.describeBool( "biasOnHiddenNeurons" ).def(true).help( "when true generate a network with hidden neurons with a bias");
00202     d.describeBool( "biasOnOutputNeurons" ).def(true).help( "when true generate a network with output neurons with a bias");
00203 }
00204 
/*
 * Generates the standard three-layer architecture as a table of blocks.
 * net_block[b] columns (as consumed by updateNet()):
 *   [0] block type: 0 = connection block, 1 = update block
 *   [1] first target (or updated) neuron, [2] number of such neurons
 *   [3] first source neuron, [4] number of source neurons (connection blocks)
 * Also assigns per-neuron types/biases, display coordinates, and finally
 * recomputes the number of free parameters.
 */
void Evonet::create_net_block( int inputNeuronType, int hiddenNeuronType, int outputNeuronType, bool recurrentHiddens, bool inputOutputConnections, bool recurrentOutputs, bool biasOnHidden, bool biasOnOutput )
{
    int n;
    int i;
    int startx;
    int dx;

    // setting the neuron types
    for(i = 0; i < this->ninputs; i++) {
        this->neurontype[i]= inputNeuronType;
        neuronbias[i] = 0; // input neurons never carry a bias
    }
    for(i = this->ninputs; i < (this->nneurons - this->noutputs); i++) {
        this->neurontype[i]= hiddenNeuronType;
        neuronbias[i] = (biasOnHidden) ? 1 : 0;
    }
    for(i = (this->nneurons - this->noutputs); i < this->nneurons; i++) {
        this->neurontype[i]= outputNeuronType;
        neuronbias[i] = (biasOnOutput) ? 1 : 0;
    }

    // gain
    for(i=0; i < this->nneurons; i++) {
        this->neurongain[i]= 0;
    }

    this->net_nblocks = 0;
    // input update block
    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = 0;
    this->net_block[this->net_nblocks][2] = this->ninputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_nblocks++;

    // input-hidden connections
    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_nblocks++;
    }

    // hidden-hidden connections
    if (recurrentHiddens) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = this->ninputs;
        this->net_block[this->net_nblocks][4] = this->nhiddens;
        this->net_nblocks++;
    }

    // hidden update block
    if (this->nhiddens > 0) {
        this->net_block[this->net_nblocks][0] = 1;
        this->net_block[this->net_nblocks][1] = this->ninputs;
        this->net_block[this->net_nblocks][2] = this->nhiddens;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = 0;
        this->net_nblocks++;
    }

    // input-output connections
    // (always present for a perceptron-like net with no hidden layer)
    if (this->nhiddens == 0 || inputOutputConnections) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = 0;
        this->net_block[this->net_nblocks][4] = this->ninputs;
        this->net_nblocks++;
    }

    // hidden-output connections
    if (this->nhiddens > 0) {
        this->net_block[net_nblocks][0] = 0;
        this->net_block[net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[net_nblocks][2] = this->noutputs;
        this->net_block[net_nblocks][3] = this->ninputs;
        this->net_block[net_nblocks][4] = this->nhiddens;
        this->net_nblocks++;
    }

    // output-output connections
    if (recurrentOutputs) {
        this->net_block[this->net_nblocks][0] = 0;
        this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][2] = this->noutputs;
        this->net_block[this->net_nblocks][3] = this->ninputs + this->nhiddens;
        this->net_block[this->net_nblocks][4] = this->noutputs;
        this->net_nblocks++;
    }

    // output update block
    this->net_block[this->net_nblocks][0] = 1;
    this->net_block[this->net_nblocks][1] = this->ninputs + this->nhiddens;
    this->net_block[this->net_nblocks][2] = this->noutputs;
    this->net_block[this->net_nblocks][3] = 0;
    this->net_block[this->net_nblocks][4] = 0;
    this->net_nblocks++;

    // cartesian xy coordinate for sensory neurons for display (y=400)
    // the narrower layer is centered under/over the wider one
    n = 0;
    dx = 30;//25
    if (this->ninputs > this->noutputs) {
        startx = 50;
    } else {
        startx = ((this->noutputs - this->ninputs) / 2) * dx + 50;
    }
    for(i = 0; i < this->ninputs; i++, n++) {
        this->neuronxy[n][0] = (i * dx) + startx;
        this->neuronxy[n][1] = 400;
    }

    // cartesian xy coordinate for internal neurons for display (y=225)
    startx = this->ninputs * dx;
    for(i=0; i < (this->nneurons - (this->ninputs + this->noutputs)); i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 225;
    }

    // cartesian xy coordinate for motor neurons for display (y=50)
    if (this->ninputs > this->noutputs) {
        startx = ((this->ninputs - this->noutputs) / 2) * dx + 50;
    } else {
        startx = 50;
    }
    for(i=0; i < this->noutputs; i++, n++) {
        this->neuronxy[n][0] = startx + (i * dx);
        this->neuronxy[n][1] = 50;
    }

    // set neurons whose activation should be displayed
    for(i=0; i < this->nneurons; i++) {
        this->neurondisplay[i] = 1;
    }

    // calculate the height and width necessary to display all created neurons (drawnymax, drawnxmax)
    drawnymax = 400 + 30;
    for(i = 0, drawnxmax = 0; i < nneurons; i++) {
        if (neuronxy[i][0] > drawnxmax) {
            drawnxmax = neuronxy[i][0];
        }
    }
    drawnxmax += 60;

    // compute the number of parameters
    computeParameters();

    //i = this->ninputs;
    //if (this->ninputs > i)
    //  i = this->noutputs;
    //if ((this->nneurons - this->noutputs) > i)
    //  i = (this->nneurons - this->noutputs);
    //drawnxmax = (i * dx) + dx + 30;
}
00363 
00364 int Evonet::load_net_blocks(const char *filename, int mode)
00365 {
00366 
00367     FILE  *fp;
00368     int   b;
00369     int   n;
00370     int   i;
00371     float *ph;
00372     float *mu;
00373     float *p;
00374     int   np;
00375     const int bufferSize = 128;
00376     char  cbuffer[bufferSize];
00377 
00378     if ((fp = fopen(filename,"r")) != NULL)
00379     {
00380         fscanf(fp,"ARCHITECTURE\n");
00381         fscanf(fp,"nneurons %d\n", &nneurons);
00382         fscanf(fp,"nsensors %d\n", &ninputs);
00383         fscanf(fp,"nmotors %d\n", &noutputs);
00384         if (nneurons > MAXN)
00385             Logger::error( "Evonet - increase MAXN to support more than "+QString::number(MAXN)+" neurons" );
00386         nhiddens = nneurons - (ninputs + noutputs);
00387         fscanf(fp,"nblocks %d\n", &net_nblocks);
00388         for (b=0; b < net_nblocks; b++)
00389         {
00390             fscanf(fp,"%d %d %d %d %d", &net_block[b][0],&net_block[b][1],&net_block[b][2],&net_block[b][3],&net_block[b][4]);
00391             if (net_block[b][0] == 0)
00392                 fscanf(fp," // connections block\n");
00393             if (net_block[b][0] == 1)
00394                 fscanf(fp," // block to be updated\n");
00395             if (net_block[b][0] == 2)
00396                 fscanf(fp," // gain block\n");
00397             if (net_block[b][0] == 3)
00398                 fscanf(fp," // modulated gain block\n");
00399         }
00400         fscanf(fp,"neurons bias, delta, gain, xy position, display\n");
00401         drawnxmax = 0;
00402         drawnymax = 0;
00403         for(n=0; n < nneurons; n++)
00404         {
00405             fscanf(fp,"%d %d %d %d %d %d\n", &neuronbias[n], &neurontype[n], &neurongain[n], &neuronxy[n][0], &neuronxy[n][1], &neurondisplay[n]);
00406             if(drawnxmax < neuronxy[n][0])
00407                 drawnxmax = neuronxy[n][0];
00408             if(drawnymax < neuronxy[n][1])
00409                 drawnymax = neuronxy[n][1];
00410         }
00411         drawnxmax += 30;
00412         drawnymax += 30;
00413         
00414         if (mode == 1)
00415         {
00416             fscanf(fp,"FREE PARAMETERS %d\n", &np);
00417             if (nparameters != np) {
00418                 Logger::error(QString("ERROR: parameters defined are %1 while %2 contains %3 parameters").arg(nparameters).arg(filename).arg(np));
00419             }
00420             i = 0;
00421             ph = phep;
00422             mu = muts;
00423             p = freep;
00424 
00425             while (fgets(cbuffer,bufferSize,fp) != NULL && i < np)
00426             {
00427                 //read values from line
00428                 QString line = cbuffer;
00429                 QStringList lineContent = line.split(QRegExp("\\s+"), QString::SkipEmptyParts);
00430 
00431                 bool floatOnSecondPlace = false;
00432                 lineContent[1].toFloat(&floatOnSecondPlace);
00433                 
00434                 if(lineContent.contains("*") || floatOnSecondPlace)
00435                     readNewPheLine(lineContent, ph, mu);
00436                 else
00437                     readOldPheLine(lineContent, ph, mu);
00438 
00439                 //refresh parameters
00440                 *p = *ph;
00441 
00442                 i++;
00443                 mu++;
00444                 ph++;
00445                 p++;
00446             }
00447             pheloaded = true;
00448         }
00449         fclose(fp);
00450 
00451         Logger::info( "Evonet - loaded file " + QString(filename) );
00452         return(1);
00453     }
00454     else
00455     {
00456         Logger::warning( "Evonet - File " + QString(filename) + " not found" );
00457         return(0);
00458     }
00459 }
00460 
00461 void Evonet::readOldPheLine(QStringList line, float* par, float* mut)
00462 {
00463     *par = line[0].toFloat();
00464 
00465     if(*par != DEFAULT_VALUE) { //no mutations
00466         *mut = 0;
00467     }
00468 }
00469 
00470 void Evonet::readNewPheLine(QStringList line, float* par, float* mut)
00471 {
00472     if(line[0] == "*") {
00473         *par = DEFAULT_VALUE; //start at random
00474     } else {
00475         //error handling
00476         *par = line[0].toFloat();
00477     }
00478 
00479     if(line[1] == "*") {
00480         *mut = DEFAULT_VALUE;
00481     } else {
00482         *mut = line[1].toFloat();
00483     }
00484 }
00485 
00486 /*
00487  * It save the architecture and also the parameters (when mode =1)
00488  */
00489 void Evonet::save_net_blocks(const char *filename, int mode)
00490 {
00491     FILE *fp;
00492     int b;
00493     int n;
00494     int i;
00495     int t;
00496 
00497     char* default_string = "*\t\t";
00498     char **p = new char*[freeParameters()];
00499     char **mu = new char*[freeParameters()];
00500     for(int h=0; h<freeParameters(); h++) {
00501         mu[h] = new char[50];
00502         p[h] = new char[50];
00503         
00504         if(muts[h] == DEFAULT_VALUE) {
00505             mu[h] = default_string;
00506         } else {
00507             sprintf(mu[h], "%f", muts[h]);
00508         }
00509 
00510         if(freep[h] == DEFAULT_VALUE) {
00511             p[h] = default_string;
00512         } else {
00513             sprintf(p[h], "%f", freep[h]);
00514         }
00515     }
00516 
00517     if ((fp = fopen(filename,"w")) != NULL) {
00518         fprintf(fp,"ARCHITECTURE\n");
00519         fprintf(fp,"nneurons %d\n", nneurons);
00520         fprintf(fp,"nsensors %d\n", ninputs);
00521         fprintf(fp,"nmotors %d\n", noutputs);
00522         fprintf(fp,"nblocks %d\n", net_nblocks);
00523         for (b = 0; b < net_nblocks; b++) {
00524             fprintf(fp,"%d %d %d %d %d", net_block[b][0],net_block[b][1],net_block[b][2],net_block[b][3],net_block[b][4]);
00525             if (net_block[b][0] == 0) {
00526                 fprintf(fp," // connections block\n");
00527             } else if (net_block[b][0] == 1) {
00528                 fprintf(fp," // block to be updated\n");
00529             } else if (net_block[b][0] == 2) {
00530                 fprintf(fp," // gain block\n");
00531             } else if (net_block[b][0] == 3) {
00532                 fprintf(fp," // modulated gain block\n");
00533             }
00534         }
00535         fprintf(fp,"neurons bias, delta, gain, xy position, display\n");
00536         for(n = 0; n < nneurons; n++) {
00537             fprintf(fp,"%d %d %d %d %d %d\n", neuronbias[n], neurontype[n], neurongain[n], neuronxy[n][0], neuronxy[n][1], neurondisplay[n]);
00538         }
00539 
00540         computeParameters();
00541         if (mode == 1) {
00542             fprintf(fp,"FREE PARAMETERS %d\n", nparameters);
00543             for(i = 0; i < nneurons; i++) {
00544                 if (neurongain[i] == 1) {
00545                     fprintf(fp,"%s \t %s \tgain %s\n",*p, *mu, neuronl[i]);
00546                     p++;
00547                     mu++;
00548                 }
00549             }
00550             for(i=0; i<nneurons; i++) {
00551                 if (neuronbias[i] == 1) {
00552                     fprintf(fp,"%s \t %s \tbias %s\n",*p, *mu, neuronl[i]);
00553                     p++;
00554                     mu++;
00555                 }
00556             }
00557             for (b=0; b < net_nblocks; b++) {
00558                 if (net_block[b][0] == 0) {
00559                     for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
00560                         for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
00561                             fprintf(fp,"%s \t %s \tweight %s from %s\n",*p, *mu, neuronl[t], neuronl[i]);
00562                             p++;
00563                             mu++;
00564                         }
00565                     }
00566                 } else if (net_block[b][0] == 1) {
00567                     for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
00568                         if (neurontype[t] == 1) {
00569                             float timeC = 0;
00570                             if(*p != default_string) {
00571                                 timeC = atof(*p);
00572                                 timeC = fabs(timeC)/wrange;  //(timeC + wrange)/(wrange*2);
00573                             }
00574 
00575                             fprintf(fp,"%s \t %s \ttimeconstant %s (%f)\n", *p, *mu, neuronl[t], timeC);
00576                             p++;
00577                             mu++;
00578                         }
00579                     }
00580                 }
00581             }
00582         }
00583         fprintf(fp,"END\n");
00584 
00585         Logger::info( "Evonet - controller saved on file " + QString(filename) );
00586     } else {
00587         Logger::error( "Evonet - unable to create the file " + QString(filename) );
00588     }
00589     fclose(fp);
00590 }
00591 
00592 /*
00593  * standard logistic
00594  */
00595 float Evonet::logistic(float f)
00596 {
00597     return((float) (1.0 / (1.0 + exp(0.0 - f))));
00598 }
00599 
00600 /*
00601  * compute the number of free parameters
00602  */
00603 void Evonet::computeParameters()
00604 {
00605     int i;
00606     int t;
00607     int b;
00608     int updated[MAXN];
00609     int ng;
00610     int nwarnings;
00611 
00612     ng  = 0;
00613     for(i=0;i < nneurons;i++) {
00614         updated[i] = 0;
00615     }
00616     // gain
00617     for(i=0;i < nneurons;i++) {
00618         if (neurongain[i] == 1) {
00619             ng++;
00620         }
00621     }
00622     // biases
00623     for(i=0;i < nneurons;i++) {
00624         if (neuronbias[i] == 1) {
00625             ng++;
00626         }
00627     }
00628     // timeconstants
00629     for(i=0;i < nneurons;i++) {
00630         if (neurontype[i] == 1) {
00631             ng++;
00632         }
00633     }
00634     // blocks
00635     for (b=0; b < net_nblocks; b++) {
00636         // connection block
00637         if (net_block[b][0] == 0) {
00638             for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
00639                 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
00640                     ng++;
00641                 }
00642             }
00643         }
00644     }
00645 
00646     nwarnings = 0;
00647     for(i=0;i < nneurons;i++) {
00648         if (updated[i] < 1 && nwarnings == 0) {
00649             Logger::warning( "Evonet - neuron " + QString::number(i) + " will never be activated according to the current architecture" );
00650             nwarnings++;
00651         }
00652         if (updated[i] > 1 && nwarnings == 0) {
00653             Logger::warning( "Evonet - neuron " + QString::number(i) + " will be activated more than once according to the current architecture" );
00654             nwarnings++;
00655         }
00656     }
00657     nparameters=ng; // number of parameters
00658 }
00659 
00660 void Evonet::updateNet()
00661 {
00662     int i;
00663     int t;
00664     int b;
00665     float *p;
00666     float delta;
00667     float netinput[MAXN];
00668     float gain[MAXN];
00669 
00670     p  = freep;
00671     //nl  = neuronlesion;
00672 
00673     // gain
00674     for(i=0;i < nneurons;i++) {
00675         if (neurongain[i] == 1) {
00676             gain[i] = (float) (fabs((double) *p) / wrange) * grange;
00677             p++;
00678         } else {
00679             gain[i] = 1.0f;
00680         }
00681     }
00682     // biases
00683     for(i=0;i < nneurons;i++) {
00684         if (neuronbias[i] == 1) {
00685             netinput[i] = ((double)*p/wrange)*brange;
00686             p++;
00687         } else {
00688             netinput[i] = 0.0f;
00689         }
00690     }
00691 
00692     // blocks
00693     for (b=0; b < net_nblocks; b++) {
00694         // connection block
00695         if (net_block[b][0] == 0) {
00696             for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
00697                 for(i=net_block[b][3]; i < net_block[b][3] + net_block[b][4];i++) {
00698                     netinput[t] += act[i] * gain[i] * *p;
00699                     p++;
00700                 }
00701             }
00702         }
00703         // gain block (gain of neuron a-b set equal to gain of neuron a)
00704         if (net_block[b][0] == 2) {
00705             for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
00706                 gain[t] = gain[net_block[b][1]];
00707             }
00708         }
00709         // gain block (gain of neuron a-b set equal to act[c])
00710         if (net_block[b][0] == 3) {
00711             for(t=net_block[b][1]; t < net_block[b][1] + net_block[b][2];t++) {
00712                 gain[t] = act[net_block[b][3]];
00713             }
00714         }
00715         // update block
00716         if (net_block[b][0] == 1) {
00717             for(t=net_block[b][1]; t < (net_block[b][1] + net_block[b][2]); t++) {
00718                 if (t < ninputs) {
00719                     switch(neurontype[t]) {
00720                         case 0: // simple rely units
00721                             act[t] = input[t];
00722                             break;
00723                         case 1: // delta neurons
00724                             delta = (float) (fabs((double) *p) / wrange);
00725                             p++;
00726                             act[t] = (act[t] * delta)  + (input[t] * (1.0f - delta));
00727                         break;
00728                     }
00729                     if(neuronlesion[t]) {
00730                         act[t]= (float)neuronlesionVal[t];//0.0 //lesion code onof
00731                     }
00732                 } else {
00733                     switch(neurontype[t]) {
00734                         case 0: // simple logistic
00735                             act[t] = logistic(netinput[t]);
00736                             delta = 0.0;
00737                             break;
00738                         case 1: // delta neurons
00739                             delta = (float) (fabs((double) *p) / wrange);
00740                             p++;
00741                             act[t] = (act[t] * delta)  + (logistic(netinput[t]) * (1.0f - delta));
00742                             break;
00743                         case 2: // binary neurons
00744                             if (netinput[t] >= 0.0) {
00745                                 act[t] = 1.0;
00746                             } else {
00747                                 act[t] = 0.0;
00748                             }
00749                             break;
00750                         case 3: // logistic2 neurons
00751                             act[t] = logistic(netinput[t]*0.2f);
00752                             delta = 0.0;
00753                             break;
00754                     }
00755                     if(neuronlesion[t]) {
00756                         act[t]= (float)neuronlesionVal[t];//0.0; //lesion code onof
00757                     }
00758                 }
00759             }
00760         }
00761     }
00762 
00763     // Storing the current activations
00764     memcpy(storedActivations[nextStoredActivation], act, nneurons * sizeof(float));
00765     nextStoredActivation = (nextStoredActivation + 1) % MAXSTOREDACTIVATIONS;
00766     if (firstStoredActivation == nextStoredActivation) {
00767         // We have filled the circular buffer, discarding the oldest activation
00768         firstStoredActivation += (firstStoredActivation + 1) % MAXSTOREDACTIVATIONS;
00769     }
00770 
00771     emit evonetUpdated();
00772 }
00773 
00774 int Evonet::setInput(int inp, float value)
00775 {
00776     if (inp>=ninputs || inp<0) {
00777         return -1;// exceding sensor number
00778     }
00779     input[inp]=value;
00780     return 0;
00781 }
00782 
00783 float Evonet::getOutput(int out)
00784 {
00785     if(out>=noutputs) {
00786         return -1; //exceeding out numbers
00787     }
00788     return act[ninputs+nhiddens+out];
00789 }
00790 
00791 float Evonet::getInput(int in)
00792 {
00793     return this->input[in];
00794 }
00795 
00796 float Evonet::getNeuron(int in)
00797 {
00798     return act[in];
00799 }
00800 
00801 void Evonet::resetNet()
00802 {
00803     int i;
00804     for (i = 0; i < MAXN; i++) {
00805         act[i]=0.0;
00806         netinput[i]=0.0;
00807         input[i]=0.0;
00808     }
00809 }
00810 
00811 void Evonet::injectHidden(int nh, float val)
00812 {
00813     if(nh<nhiddens) {
00814         act[this->ninputs+nh] = val;
00815     }
00816 }
00817 
00818 float Evonet::getHidden(int h)
00819 {
00820     if(h<nhiddens && h>=0) {
00821         return act[this->ninputs+h];
00822     } else {
00823         return -999;
00824     }
00825 }
00826 
00827 int Evonet::freeParameters()
00828 {
00829     return this->nparameters;
00830 }
00831 
00832 bool Evonet::pheFileLoaded()
00833 {
00834     return pheloaded;
00835 }
00836 
00837 /*
00838  * Copy parameters from genotype
00839  */
00840 void Evonet::getParameters(const int *dt)
00841 {
00842     int i;
00843     float *p;
00844 
00845     p = freep;
00846     for (i=0; i<freeParameters(); i++, p++) {
00847         *p = wrange - ((float)dt[i]/geneMaxValue)*wrange*2;
00848     }
00849 }
00850 
00851 void Evonet::getMutations(float* GAmut)
00852 {
00853     //copy mutation vector
00854     for(int i=0; i<freeParameters(); i++) {
00855         GAmut[i] = muts[i];
00856     }
00857 }
00858 
00859 void Evonet::copyPheParameters(int* pheGene)
00860 {
00861     for(int i=0; i<freeParameters(); i++)
00862     {
00863         if(phep[i] == DEFAULT_VALUE) {
00864             pheGene[i] = DEFAULT_VALUE;
00865         } else {
00866             pheGene[i] = (int)((wrange - phep[i])*geneMaxValue/(2*wrange));
00867         }
00868     }
00869 }
00870 
00871 void Evonet::printIO()
00872 {
00873     QString output;
00874 
00875     output = "In: ";
00876     for (int in = 0; in < this->ninputs; in++) {
00877         output += QString("%1 ").arg(this->input[in], 0, 'f', 3);
00878     }
00879     output += "Hid: ";
00880     for (int hi = this->ninputs; hi < (this->nneurons - this->noutputs); hi++) {
00881         output += QString("%1 ").arg(this->act[hi], 0, 'f', 3);
00882     }
00883     output += "Out: ";
00884     for (int out = 0; out < this->noutputs; out++) {
00885         output += QString("%1 ").arg(this->act[this->ninputs+this->nhiddens+out], 0, 'f', 3);
00886     }
00887 
00888     Logger::info(output);
00889 
00890 }
00891 
00892 int Evonet::getParamBias(int nbias)
00893 {
00894     int pb=-999; // if remain -999 it means nbias is out of range
00895     if (nbias<nparambias && nbias>-1) {
00896         pb=(int) freep[nparambias+nbias];
00897     }
00898     return pb;
00899 }
00900 
00901 float Evonet::getWrange()
00902 {
00903     return wrange;
00904 }   
00905 
00906 
00907 void Evonet::printBlocks()
00908 {
00909     Logger::info("Evonet - ninputs " + QString::number(this->ninputs));
00910     Logger::info("Evonet - nhiddens " + QString::number(this->nhiddens));
00911     Logger::info("Evonet - noutputs " + QString::number(this->noutputs));
00912     Logger::info("Evonet - nneurons " + QString::number(this->nneurons));
00913 
00914     for(int i=0;i<this->net_nblocks;i++) {
00915         Logger::info( QString( "Evonet Block - %1 | %2 - %3 -> %4 - %5" )
00916                     .arg(net_block[i][0])
00917                     .arg(net_block[i][1])
00918                     .arg(net_block[i][2])
00919                     .arg(net_block[i][3])
00920                     .arg(net_block[i][4]) );
00921     }
00922 }
00923 
00924 int Evonet::getNoInputs()
00925 {
00926     return ninputs;
00927 }
00928 
00929 int Evonet::getNoHiddens()
00930 {
00931     return nhiddens;
00932 }
00933 
00934 int Evonet::getNoOutputs()
00935 {
00936     return noutputs;
00937 }
00938 
00939 int Evonet::getNoNeurons()
00940 {
00941     return nneurons;
00942 }
00943 
00944 void Evonet::setRanges(double weight, double bias, double gain)
00945 {
00946     wrange=weight;
00947     brange=bias;
00948     grange=gain;
00949 }
00950 
00951 float* Evonet::getOldestStoredActivations()
00952 {
00953     if (firstStoredActivation == nextStoredActivation) {
00954         return NULL;
00955     }
00956 
00957     const int ret = firstStoredActivation;
00958     firstStoredActivation = (firstStoredActivation + 1) % MAXSTOREDACTIVATIONS;
00959 
00960     return storedActivations[ret];
00961 }
00962 
00963 } // end namespace farsa
00964 
00965 // All the suff below is to restore the warning state on Windows
00966 #if defined(_MSC_VER)
00967     #pragma warning(pop)
00968 #endif