learningalgorithm.cpp
1 /********************************************************************************
2  * Neural Network Framework. *
3  * Copyright (C) 2005-2011 Gianluca Massera <emmegian@yahoo.it> *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 2 of the License, or *
8  * (at your option) any later version. *
9  * *
10  * This program is distributed in the hope that it will be useful, *
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13  * GNU General Public License for more details. *
14  * *
15  * You should have received a copy of the GNU General Public License *
16  * along with this program; if not, write to the Free Software *
17  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
18  ********************************************************************************/
19 
20 #include "neuralnet.h"
21 #include "learningalgorithm.h"
22 #include "factory.h"
23 
24 namespace farsa {
25 
26 void Pattern::setInputsOf( Cluster* cl, const DoubleVector& ins ) {
27  pinfo[cl].inputs.resize( cl->numNeurons() );
28  pinfo[cl].inputs.copyValues( ins );
29 };
30 
31 void Pattern::setOutputsOf( Cluster* cl, const DoubleVector& ous ) {
32  pinfo[cl].outputs.resize( cl->numNeurons() );
33  pinfo[cl].outputs.copyValues( ous );
34 };
35 
36 void Pattern::setInputsOutputsOf( Cluster* cl, const DoubleVector& ins, const DoubleVector& ous ) {
37  pinfo[cl].inputs.resize( cl->numNeurons() );
38  pinfo[cl].inputs.copyValues( ins );
39  pinfo[cl].outputs.resize( cl->numNeurons() );
40  pinfo[cl].outputs.copyValues( ous );
41 };
42 
	// Body of the inputs accessor (the signature line is missing from this
	// extraction — presumably Pattern::inputsOf( Cluster* )). Returns the
	// stored input values for cl, or an empty vector if cl has no entry.
	if ( pinfo.count(cl) != 0 ) {
		return pinfo[cl].inputs;
	} else {
		// NOTE(review): if the (unseen) return type is a const reference,
		// returning a temporary DoubleVector here dangles — confirm against
		// the header that this returns by value.
		return DoubleVector();
	}
};
50 
	// Body of the outputs accessor (the signature line is missing from this
	// extraction — presumably Pattern::outputsOf( Cluster* )). Returns the
	// stored output values for cl, or an empty vector if cl has no entry.
	if ( pinfo.count(cl) != 0 ) {
		return pinfo[cl].outputs;
	} else {
		// NOTE(review): if the (unseen) return type is a const reference,
		// returning a temporary DoubleVector here dangles — confirm against
		// the header that this returns by value.
		return DoubleVector();
	}
};
58 
	// Body of the PatternInfo accessor (the signature line is missing from
	// this extraction — presumably Pattern::infoOf( Cluster* )).
	// NOTE(review): map operator[] default-inserts an empty PatternInfo when
	// cl is absent, unlike the count()-guarded accessors above — confirm
	// that insertion-on-lookup is intended here.
	return pinfo[cl];
};
62 
63 void Pattern::configure(ConfigurationParameters& params, QString prefix) {
64  //--- get all parameters with the prefix 'cluster:'
65  QStringList clusterList = params.getParametersWithPrefixList( prefix, "cluster:" );
66  foreach( QString cluster, clusterList ) {
67  QString id = cluster.split(':')[1];
68  if ( id.isEmpty() ) continue;
69  //--- now, it check if there is a inputs and outputs parameter and load it
70  QString str = params.getValue( prefix + "inputs:" + id );
71  DoubleVector inputs;
72  if (!str.isEmpty()) {
73  QStringList list = str.split(QRegExp("\\s+"), QString::SkipEmptyParts);
74  for( int i=0; i<list.size(); i++) {
75  inputs.append( list[i].toDouble() );
76  }
77  }
78  str = params.getValue( prefix + "outputs:" + id );
79  DoubleVector outputs;
80  if (!str.isEmpty()) {
81  QStringList list = str.split(QRegExp("\\s+"), QString::SkipEmptyParts);
82  for( int i=0; i<list.size(); i++) {
83  outputs.append( list[i].toDouble() );
84  }
85  }
86  if ( inputs.size() == 0 && outputs.size() == 0 ) continue;
87  Cluster* cl = params.getObjectFromParameter<Cluster>( prefix+cluster, false, true );
88  if ( inputs.size() > 0 ) {
89  setInputsOf( cl, inputs );
90  }
91  if ( outputs.size() > 0 ) {
92  setOutputsOf( cl, outputs );
93  }
94  }
95 }
96 
97 void Pattern::save(ConfigurationParameters& params, QString prefix) {
98  params.startObjectParameters(prefix, "Pattern", this);
99  QString tmpl = "%1:%2";
100  QList<Cluster*> cls = pinfo.keys();
101  for( int i=0; i<cls.size(); i++ ) {
102  PatternInfo& info = pinfo[ cls[i] ];
103  params.createParameter(prefix, tmpl.arg("cluster").arg(i), cls[i]);
104  if ( info.inputs.size() > 0 ) {
105  QStringList list;
106  for( unsigned int j=0; j<info.inputs.size(); j++ ) {
107  list.push_back(QString::number(info.inputs[j]));
108  }
109  params.createParameter(prefix, tmpl.arg("inputs").arg(i), list.join(" "));
110  }
111  if ( info.outputs.size() > 0 ) {
112  QStringList list;
113  for( unsigned int j=0; j<info.outputs.size(); j++ ) {
114  list.push_back(QString::number(info.outputs[j]));
115  }
116  params.createParameter(prefix, tmpl.arg("outputs").arg(i), list.join(" "));
117  }
118  }
119 }
120 
121 void Pattern::describe( QString type ) {
122  Descriptor d = addTypeDescription( type, "Represent a pattern of inputs/outputs for Clusters", "A Pattern is specified by groups of three parameters: cluster, inputs and outputs. The inputs and outputs parameters specify the values to set on the neurons of the cluster specified by the corresponding cluster parameter. The inputs and outputs parameter are not mandatory but specify a cluster without setting inputs or outputs has no effect" );
123  d.describeObject( "cluster" ).type( "Cluster" ).props( IsMandatory | AllowMultiple ).help( "The Cluster on which the inputs and outputs parameters referes" );
124  d.describeReal( "inputs" ).props( IsList | AllowMultiple ).help( "The values to set on the cluster's input neurons" );
125  d.describeReal( "outputs" ).props( IsList | AllowMultiple ).help( "The values to set on the cluster's output neurons" );
126 }
127 
	// Fragment: tail of a LearningAlgorithm constructor taking a network
	// pointer (signature line missing from this extraction); stores the
	// neural network the algorithm will operate on.
	this->netp = net;
}

	// Fragment: tail of the default constructor (signature line missing);
	// starts with no associated network.
	this->netp = NULL;
}

	// Fragment: closing brace of the (empty-bodied) destructor, whose
	// signature line is missing from this extraction.
}
140 
PatternSet LearningAlgorithm::loadPatternSet( ConfigurationParameters& params, QString path, QString prefix ) {
	// Build a PatternSet from every configuration group under path whose
	// name starts with prefix, materializing each group as a Pattern object.
	// NOTE(review): the original source numbering jumps here (lines 142 and
	// 151 are missing from this extraction) — confirm against the repository
	// whether additional statements belong in this body.
	//--- convert to PatternSet
	PatternSet patternSet;
	foreach( QString group, params.getGroupsWithPrefixList(path, prefix) ) {
		patternSet << *(params.getObjectFromGroup<Pattern>( path + "/" + group ));
	}
#ifdef __GNUC__
	// The Italian #warning below says: if patternSet copies the Pattern
	// created inside, then the ones created here leak because they are
	// never destroyed.
	#warning Se patternSet copia il Pattern creato all interno, allora quelli creati qui creano un leak perche non vengono mai distrutti !!
#endif
	return patternSet;
}
154 
155 void LearningAlgorithm::savePatternSet( PatternSet& set, ConfigurationParameters& params, QString prefix ) {
156  QString tmpl = prefix+":%1";
157  for( int i=0; i<set.size(); i++ ) {
158  QString group = tmpl.arg(i);
159  params.createGroup( group );
160  set[i].save( params, group );
161  }
162 }
163 
164 }