00001 # include <qmessagebox.h>
00002 # include <network.h>
00003 # include <iostream>
00004 # include <list>
00005 # include <learningThread.h>
00006 # include <randomc.h>
00007
00008 extern LearningThread lt;
00009 extern TRandomMersenne *rg ;
00010
00011 using namespace std ;
00012
// Builds an empty network: an input layer, an output layer, no hidden
// layers, no axons. Ids are handed out from the shared counter nextFreeId
// (input gets 0, output gets 1).
Network::Network() {
    nextFreeId = 0;
    input = new Layer(nextFreeId);
    nextFreeId++;
    output = new Layer(nextFreeId);
    nextFreeId++;
    hidden = new list<Layer*>();
    axons = new list<Axon*>();

    // Rendering parameters (vertical step between layers, sphere/cylinder
    // sizes, specular shininess) — consumed by the draw/accessor methods.
    vstep = 1.0 ;
    neursize = 0.1 ;
    axsize = 0.01 ;
    shiny = 32 ;

    // Activation steepness coefficient (see getK/setK).
    k = 1.0 ;

    // Everything except the numeric values is visible by default.
    displayaxons_ = true ;
    displayneurons_ = true ;
    displaylayers_ = true ;
    displayvalues_ = false ;

    // Per-epoch average-error history filled in by learning().
    errorEvolution = new vector<double>();
    currentAverageError = 0.0;
}
00037
00038 Layer* Network::getInputLayer( void ) { return input ; }
00039
00040 Layer* Network::getOutputLayer( void ) { return output ; }
00041
00042 list<Layer*>* Network::getHiddenLayers( void ) { return hidden ; }
00043
00044 list<Axon*>* Network::getAxons( void ) { return axons ; }
00045
00046 int Network::getNbNeurons( int id_layer ) {
00047 if (id_layer == input->getId()) {
00048 return input->nbNeurons() ;
00049 } else if (id_layer == output->getId()) {
00050 return output->nbNeurons() ;
00051 } else {
00052 for( list<Layer*>::iterator iter = hidden->begin() ; iter != hidden->end() ; iter++ ) {
00053 if( (*iter)->getId() == id_layer )
00054 return (*iter)->nbNeurons() ;
00055 }
00056 }
00057 return 0 ;
00058 }
00059
00060 int Network::getHiddenLayerId(int i) {
00061 if ((unsigned int)(i-1) < hiddenLayersId.size() && (i-1) >= 0) return hiddenLayersId[i-1];
00062 else return -1;
00063 }
00064
00065 int Network::getHiddenLayerPos( int id ) {
00066
00067
00068 vector<int>::iterator iter;
00069 int pos = 1 ;
00070 for (iter = hiddenLayersId.begin(); iter != hiddenLayersId.end(); iter++) {
00071 if ((*iter) == id) return pos ;
00072 pos++ ;
00073 }
00074 return -1 ;
00075 }
00076
00077 Layer* Network::getLayer(int id_l) {
00078 if (id_l == input->getId()) return input;
00079
00080 if (id_l == output->getId()) return output;
00081
00082 list<Layer*>::iterator iter;
00083 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00084 if ((*iter)->getId() == id_l) return (*iter);
00085
00086 return NULL;
00087 }
00088
00089 Neuron* Network::getNeuronFromId(int id_neur) {
00090 if (input->existNeuron(id_neur)) return input->getNeuronFromId(id_neur);
00091
00092 else if (output->existNeuron(id_neur)) return output->getNeuronFromId(id_neur);
00093
00094 else {
00095 list<Layer*>::iterator iter;
00096 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00097 if ((*iter)->existNeuron(id_neur)) return (*iter)->getNeuronFromId(id_neur);
00098 }
00099
00100 QMessageBox::warning(0, "Warning", "getNeuronFromId : The neuron does not exist...");
00101 return NULL;
00102 }
00103
00104 int Network::getLayerNumTheNeuronIsOn( int id_neur ) {
00105 if (input->existNeuron(id_neur)) return 0;
00106
00107 int num = 1 ;
00108 list<Layer*>::iterator iter;
00109 for (iter = hidden->begin(); iter != hidden->end(); iter++, num++)
00110 if ((*iter)->existNeuron(id_neur)) return num ;
00111
00112 if (output->existNeuron(id_neur)) return nbHiddenLayers()+1 ;
00113
00114 return -1 ;
00115 }
00116
00117 int Network::getLayerNumFromLayerId( int id_layer ) {
00118 if( input->getId() == id_layer ) return 0 ;
00119 int num = 1 ;
00120 list<Layer*>::iterator iter;
00121 for (iter = hidden->begin(); iter != hidden->end(); iter++, num++)
00122 if ((*iter)->getId() == id_layer ) return num ;
00123 if (output->getId() == id_layer) return nbHiddenLayers()+1 ;
00124 return -1 ;
00125 }
00126
00127 float Network::getK() { return k; }
00128
00129 void Network::setK(float _k) { k = _k; }
00130
00131 int Network::getEPS() { return EPS; }
00132
00133 void Network::setEPS(int e) { EPS = e; }
00134
00135 int Network::getCurrentEpoch() { return currentEpoch; }
00136
00137 double Network::getCurrentAverageError() { return currentAverageError; }
00138
00139 vector<double>* Network::getErrorEvolution() { return errorEvolution; }
00140
00141 int Network::shininess( void ) {
00142 return shiny ;
00143 }
00144
00145 float Network::neuronsize( void ) {
00146 return neursize ;
00147 }
00148
00149 float Network::axonsize( void ) {
00150 return axsize ;
00151 }
00152
00153 bool Network::displayaxons(void) {
00154 return displayaxons_ ;
00155 }
00156
00157 bool Network::displayneurons(void) {
00158 return displayneurons_ ;
00159 }
00160
00161 bool Network::displaylayers(void) {
00162 return displaylayers_ ;
00163 }
00164
00165 bool Network::displayvalues(void) {
00166 return displayvalues_ ;
00167 }
00168
00169 void Network::setDisplayAxons( bool display ) {
00170 displayaxons_ = display ;
00171 }
00172
00173 void Network::setDisplayNeurons( bool display ) {
00174 displayneurons_ = display ;
00175 }
00176
00177 void Network::setDisplayLayers( bool display ) {
00178 displaylayers_ = display ;
00179 }
00180
00181 void Network::setDisplayValues( bool display ) {
00182 displayvalues_ = display ;
00183 }
00184
00185 float Network::getVStep( void ) {
00186 return vstep ;
00187 }
00188
00189 ManipulatedFrame * Network::frame( int id ) {
00190 if( input->frame(id) != NULL ) return input->frame(id) ;
00191 if( output->frame(id) != NULL ) return output->frame(id) ;
00192
00193 list<Layer*>::iterator iter;
00194 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00195 if( (*iter)->frame(id) != NULL ) return (*iter)->frame(id) ;
00196
00197
00198
00199 return NULL ;
00200 }
00201
// Sets the specular shininess exponent used when rendering the network.
void Network::setShininess( int shiny ) {
    // parameter shadows the member, hence the explicit this->
    this->shiny = shiny ;
}
00205
00206 void Network::draw( int selected, const bool names ) {
00207 list<Layer*>::iterator layeriter;
00208 input->drawNeurons( selected, names ) ;
00209 for (layeriter = hidden->begin(); layeriter != hidden->end(); layeriter++)
00210 (*layeriter)->drawNeurons( selected, names ) ;
00211 output->drawNeurons( selected, names ) ;
00212 list<Axon*>::iterator axoniter;
00213 for (axoniter = axons->begin(); axoniter != axons->end(); axoniter++)
00214 (*axoniter)->draw( selected, names ) ;
00215 input->draw( selected, names ) ;
00216 for (layeriter = hidden->begin(); layeriter != hidden->end(); layeriter++)
00217 (*layeriter)->draw( selected, names ) ;
00218 output->draw( selected, names ) ;
00219 }
00220
00221 int Network::nbHiddenLayers() {
00222 return hidden->size();
00223 }
00224
00225 int Network::nbLayers() {
00226 return hidden->size() + 2;
00227 }
00228
00229 void Network::addHiddenLayer() {
00230 Layer *l = new Layer(nextFreeId);
00231 hiddenLayersId.push_back(nextFreeId);
00232 nextFreeId++;
00233 hidden->push_back(l);
00234 }
00235
00236 void Network::addHiddenLayer(int id) {
00237 Layer *l = new Layer(id);
00238 hiddenLayersId.push_back(id);
00239 hidden->push_back(l);
00240 }
00241
00242 void Network::addNHiddenLayers( int n ) {
00243 for( int i = 0 ; i < n ; i++ )
00244 addHiddenLayer() ;
00245 }
00246
00247 void Network::removeHiddenLayer(int id_layer) {
00248 if (existLayer(id_layer)) {
00249 bool found = false;
00250 list<Layer*>::iterator iter = hidden->begin();
00251 while (iter != hidden->end() && !found) {
00252 if (id_layer == (*iter)->getId()) found = true;
00253 if (!found) iter++;
00254 }
00255 (*iter)->removeNeurons();
00256 hidden->erase(iter);
00257 delete (*iter);
00258 found = false;
00259 vector<int>::iterator iter2 = hiddenLayersId.begin();
00260 while (iter2 != hiddenLayersId.end() && !found) {
00261 if (id_layer == (*iter2)) found = true;
00262 if (!found) iter2++;
00263 }
00264 hiddenLayersId.erase(iter2);
00265 }
00266
00267 }
00268
00269 void Network::clearLayer(int id_layer) {
00270 if (id_layer == input->getId()) input->removeNeurons();
00271
00272 else if (id_layer == output->getId()) output->removeNeurons();
00273
00274 else {
00275 if (existLayer(id_layer)) {
00276 bool found = false;
00277 list<Layer*>::iterator iter = hidden->begin();
00278 while(iter != hidden->end() && !found) {
00279 if (id_layer == (*iter)->getId()) { (*iter)->removeNeurons(); found = true; }
00280 if (!found) iter++;
00281 }
00282 }
00283 else QMessageBox::warning(0, "Warning", "clearLayer : The layer does not exist...");
00284 }
00285 }
00286
00287 void Network::clearHiddenLayers() {
00288 list<Layer*>::iterator iter;
00289 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00290 clearLayer((*iter)->getId());
00291 hidden->clear();
00292 hiddenLayersId.clear();
00293 }
00294
00295 bool Network::existLayer(int id_layer) {
00296 if (id_layer == input->getId() || id_layer == output->getId()) return true;
00297
00298 list<Layer*>::iterator iter;
00299 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00300 if (id_layer == (*iter)->getId()) return true;
00301
00302 return false;
00303 }
00304
00305 void Network::addNeuron(int id_layer) {
00306 if (id_layer == input->getId()) { input->addNeuron(nextFreeId); nextFreeId++; }
00307
00308 else if (id_layer == output->getId()) { output->addNeuron(nextFreeId); nextFreeId++; }
00309
00310 else {
00311 if (existLayer(id_layer)) {
00312 bool found = false;
00313 list<Layer*>::iterator iter = hidden->begin();
00314 while(iter != hidden->end() && !found) {
00315 if (id_layer == (*iter)->getId()) { (*iter)->addNeuron(nextFreeId); nextFreeId++; found = true; }
00316 if (!found) iter++;
00317 }
00318 }
00319 else QMessageBox::warning(0, "Warning", "addNeuron : The layer does not exist...");
00320 }
00321 }
00322
00323 void Network::addNeuron(int id_layer, int id, float x, float z) {
00324 if (id_layer == input->getId()) { input->addNeuron(id, x, z); }
00325
00326 else if (id_layer == output->getId()) { output->addNeuron(id, x, z); }
00327
00328 else {
00329 if (existLayer(id_layer)) {
00330 bool found = false;
00331 list<Layer*>::iterator iter = hidden->begin();
00332 while(iter != hidden->end() && !found) {
00333 if (id_layer == (*iter)->getId()) { (*iter)->addNeuron(id, x, z); found = true; }
00334 if (!found) iter++;
00335 }
00336 }
00337 else QMessageBox::warning(0, "Warning", "addNeuron : The layer does not exist...");
00338 }
00339 }
00340
00341 void Network::addNNeurons(int id_layer, int n) {
00342 for(int i=0; i<n; i++)
00343 addNeuron(id_layer);
00344 }
00345
00346 void Network::removeNeuron(int id_neur) {
00347 if (input->existNeuron(id_neur)) input->removeNeuron(id_neur);
00348
00349 else if (output->existNeuron(id_neur)) output->removeNeuron(id_neur);
00350
00351 else {
00352 list<Layer*>::iterator iter;
00353 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00354 if ((*iter)->existNeuron(id_neur)) { (*iter)->removeNeuron(id_neur); return; }
00355 QMessageBox::warning(0, "Warning", "removeNeuron : The neuron does not exist...");
00356 }
00357 }
00358
00359 int Network::nbNeurons( void ) {
00360 int nb = 0 ;
00361 nb += input->nbNeurons() ;
00362 nb += output->nbNeurons() ;
00363 list<Layer*>::iterator iter;
00364 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00365 nb += (*iter)->nbNeurons() ;
00366 return nb ;
00367 }
00368
00369 bool Network::existNeuron(int id_neur) {
00370 if (input->existNeuron(id_neur)) return true;
00371
00372 list<Layer*>::iterator iter;
00373 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00374 if ((*iter)->existNeuron(id_neur)) return true;
00375
00376 if (output->existNeuron(id_neur)) return true;
00377
00378 return false;
00379 }
00380
00381 int Network::nbAxons( void ) {
00382 return axons->size() ;
00383 }
00384
00385 void Network::addAxon(int id_neur1, int id_neur2) {
00386 Neuron *n1 = getNeuronFromId(id_neur1);
00387 Neuron *n2 = getNeuronFromId(id_neur2);
00388 if (n1 != NULL && n2 != NULL) {
00389 Axon *a = new Axon(nextFreeId, n1, n2);
00390 nextFreeId++;
00391 n1->addAxonOut(a);
00392 n2->addAxonIn(a);
00393 axons->push_back(a);
00394 }
00395 else QMessageBox::warning(0, "Warning", "addAxon : One or both of the neurons do not exist...");
00396 }
00397
00398 void Network::addAxon(int id, int id_neur1, int id_neur2) {
00399 Neuron *n1 = getNeuronFromId(id_neur1);
00400 Neuron *n2 = getNeuronFromId(id_neur2);
00401 if (n1 != NULL && n2 != NULL) {
00402 Axon *a = new Axon(id, n1, n2);
00403 n1->addAxonOut(a);
00404 n2->addAxonIn(a);
00405 axons->push_back(a);
00406 }
00407 else QMessageBox::warning(0, "Warning", "addAxon : One or both of the neurons do not exist...");
00408 }
00409
00410 void Network::addAxon(int id, int id_neur1, int id_neur2, double weight) {
00411 Neuron *n1 = getNeuronFromId(id_neur1);
00412 Neuron *n2 = getNeuronFromId(id_neur2);
00413 if (n1 != NULL && n2 != NULL) {
00414 Axon *a = new Axon(id, n1, n2);
00415 n1->addAxonOut(a);
00416 n2->addAxonIn(a);
00417 a->setWeight(weight) ;
00418 axons->push_back(a);
00419 }
00420 else QMessageBox::warning(0, "Warning", "addAxon : One or both of the neurons do not exist...");
00421 }
00422
00423
00424 void Network::removeAxon(int id_ax, bool b) {
00425 bool found = false;
00426 list<Axon*>::iterator iter = axons->begin();
00427 while (iter != axons->end() && !found) {
00428 if ((*iter)->getId() == id_ax) found = true;
00429 if (!found) iter++;
00430 }
00431 if (iter != axons->end()) {
00432 if (b) {
00433 (*iter)->getNeuronIn()->removeAxonOut(id_ax);
00434 (*iter)->getNeuronOut()->removeAxonIn(id_ax);
00435 }
00436 axons->erase(iter);
00437 delete (*iter);
00438 }
00439 }
00440
00441 void Network::removeAxons() {
00442 list<Axon*>::iterator iter;
00443 for (iter = axons->begin(); iter != axons->end(); iter++) {
00444 (*iter)->getNeuronIn()->removeAxonOut((*iter)->getId());
00445 (*iter)->getNeuronOut()->removeAxonIn((*iter)->getId());
00446 delete (*iter);
00447 }
00448 axons->clear();
00449 }
00450
00451 bool Network::existAxon( int id_ax ) {
00452 list<Axon*>::iterator iter;
00453 for (iter = axons->begin(); iter != axons->end(); iter++)
00454 if ((*iter)->getId() == id_ax) return true;
00455 return false;
00456 }
00457
00458 bool Network::existAxon(int id_neur_in, int id_neur_out) {
00459 list<Axon*>::iterator iter;
00460 for (iter = axons->begin(); iter != axons->end(); iter++)
00461 if (((*iter)->getNeuronIn()->getId() == id_neur_in) && ((*iter)->getNeuronOut()->getId() == id_neur_out))
00462 return true;
00463 return false;
00464 }
00465
00466 void Network::resetAllValues() {
00467 input->resetAllValues();
00468 output->resetAllValues();
00469 list<Layer*>::iterator iter;
00470 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00471 (*iter)->resetAllValues();
00472 }
00473
00474 void Network::resetTempValues() {
00475 input->resetTempValues();
00476 output->resetTempValues();
00477 list<Layer*>::iterator iter;
00478 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00479 (*iter)->resetTempValues();
00480 }
00481
00482 void Network::initLayerIn(vector<double> in) {
00483 int i = 0;
00484 list<Neuron*>::iterator iter;
00485 for (iter = input->getNeurons()->begin(); iter != input->getNeurons()->end(); iter++) {
00486 (*iter)->setOutValue(in[i]);
00487
00488
00489
00490
00491 i++;
00492 }
00493 }
00494
00495 void Network::forwardPropagation() {
00496 list<Layer*>::iterator iter;
00497 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00498 (*iter)->forwardPropagation();
00499 output->forwardPropagation();
00500 }
00501
00502 vector<double>* Network::getOutValues() {
00503 vector<double> *computed = new vector<double>();
00504 list<Neuron*>::iterator iter;
00505 for (iter = output->getNeurons()->begin(); iter != output->getNeurons()->end(); iter++)
00506 computed->push_back((*iter)->getOutValue());
00507 return computed;
00508 }
00509
00510 vector<double>* Network::computeOut(vector<double> *in) {
00511 resetTempValues();
00512 initLayerIn(*in);
00513 forwardPropagation();
00514 vector<double> *computed = getOutValues();
00515 return computed;
00516 }
00517
00518 double Network::computeErrorRate(vector<double> *errors) {
00519 double sum = 0.0;
00520 double err;
00521 vector<double>::iterator iter;
00522 for (iter = errors->begin(); iter != errors->end(); iter++){
00523 if ((*iter) < 0) err = -1 * (*iter);
00524 else err = (*iter);
00525 sum += err;
00526
00527
00528
00529
00530 }
00531 sum = sum / errors->size();
00532 return sum;
00533 }
00534
00535 vector<double>* Network::computeErrors(vector<double> *computed, vector<double> *expected) {
00536 vector<double> *errors = new vector<double>();
00537 vector<double>::iterator iterC = computed->begin();
00538 vector<double>::iterator iterE = expected->begin();
00539 while (iterC != computed->end()) {
00540 errors->push_back( (*iterE) - (*iterC) );
00541 iterE++;
00542 iterC++;
00543 }
00544 return errors;
00545 }
00546
00547 void Network::initLayerOutGradient(vector<double> *errors) {
00548 vector<double>::iterator iterE = errors->begin();
00549 list<Neuron*>::iterator iter;
00550 for (iter = output->getNeurons()->begin(); iter != output->getNeurons()->end(); iter++) {
00551 (*iter)->computeGradientValue((*iterE));
00552 iterE++;
00553 }
00554 }
00555
00556 void Network::gradientBackPropagation(vector<double> *errors) {
00557 initLayerOutGradient(errors);
00558 list<Layer*>::iterator iterL;
00559 for (iterL = hidden->end() ; iterL != hidden->begin(); ) {
00560 iterL--;
00561 (*iterL)->gradientBackPropagation();
00562 }
00563 input->gradientBackPropagation();
00564 }
00565
00566 void Network::changeAxonsWeight(double convergenceCoeff) {
00567 list<Axon*>::iterator iter;
00568 for (iter = axons->begin(); iter != axons->end(); iter++)
00569 (*iter)->changeWeight(convergenceCoeff);
00570 }
00571
00572 double Network::learnExample(vector<double> *exampleIn, vector<double> *exampleOut, double convergenceCoeff) {
00573 vector<double> *computedOut = computeOut(exampleIn);
00574 vector<double> *errors = computeErrors(computedOut, exampleOut);
00575 double errorRate = computeErrorRate(errors);
00576 gradientBackPropagation(errors);
00577 changeAxonsWeight(convergenceCoeff);
00578
00579 return errorRate;
00580 }
00581
00582 double Network::computeAverage(vector<double> v) {
00583 double av = 0.0;
00584 vector<double>::iterator iter;
00585 for (iter = v.begin(); iter != v.end(); iter++)
00586 av += (*iter);
00587 av = av / v.size();
00588 return av;
00589 }
00590
// Reads a ';'-separated example file, one example per line, and returns the
// list of value vectors (heap-allocated; caller owns it).
//   mode == "in"  : columns [0, nbIn)          — input-neuron values
//   mode == "out" : columns [nbIn, nbIn+nbOut) — expected output values
// Any other mode leaves begin == end == 0, producing empty vectors.
// NOTE(review): 'str != NULL' relies on Qt3 QString comparison against a null
// pointer to detect EOF (readLine() returns a null QString) — confirm this
// idiom if porting to a newer Qt.
list< vector<double> >* Network::readExampleFile(QTextStream *file, QString mode) {
    int begin = 0, end = 0;
    int nbIn = input->nbNeurons(), nbOut = output->nbNeurons();
    if (mode == "in") { begin = 0; end = nbIn; }
    else if (mode == "out") { begin = nbIn; end = nbIn + nbOut; }
    list< vector<double> > *examplesList = new list< vector<double> >();
    QString str = file->readLine();
    while (str != NULL) {
        vector<double> example;
        // section(';', i, i) extracts the i-th ';'-separated field
        for (int i = begin; i < end; i++)
            example.push_back(str.section(';', i, i).toDouble());
        examplesList->push_back(example);
        str = file->readLine();
    }
    return examplesList;
}
00607
00608 vector<double> Network::readTestFile(QTextStream *file) {
00609 int nbIn = input->nbNeurons();
00610 vector<double> example ;
00611 QString str = file->readLine();
00612 for (int i = 0; i < nbIn; i++)
00613 example.push_back(str.section(';', i, i).toDouble());
00614 return example;
00615 }
00616
00617 void Network::link2Layers(Layer *first, Layer *second) {
00618 list<Neuron*>::iterator iterNF, iterNS;
00619 for (iterNF = first->getNeurons()->begin(); iterNF != first->getNeurons()->end(); iterNF++)
00620 for (iterNS = second->getNeurons()->begin(); iterNS != second->getNeurons()->end(); iterNS++)
00621 addAxon((*iterNF)->getId(), (*iterNS)->getId());
00622 }
00623
00624 void Network::constructTopologyMLP() {
00625 if (nbHiddenLayers() == 0)
00626 link2Layers(input, output);
00627 else {
00628 link2Layers(input, hidden->front());
00629 list<Layer*>::iterator iterHL = hidden->begin();
00630 while ((*iterHL) != hidden->back()) {
00631 Layer *temp = (*iterHL);
00632 iterHL++;
00633 link2Layers(temp, (*iterHL));
00634 }
00635 link2Layers(hidden->back(), output);
00636 }
00637 }
00638
// Builds a cascade-correlation-style topology: the input layer feeds every
// hidden layer and the output, and each hidden layer feeds the ones after it.
void Network::constructTopologyCC() {
    if (nbHiddenLayers() == 0)
        link2Layers(input, output);
    else {
        // input -> every hidden layer
        list<Layer*>::iterator iterHL;
        for (iterHL = hidden->begin(); iterHL != hidden->end(); iterHL++) {
            link2Layers(input, (*iterHL));
        }
        // input -> output
        link2Layers(input, output);
        iterHL = hidden->begin();
        list<Layer*>::iterator iterHL2 = hidden->begin();
        while ((*iterHL) != hidden->back()) {
            // point iterHL2 one element past iterHL
            iterHL++; iterHL2 = iterHL; iterHL--;
            // current hidden layer -> every later hidden layer
            for ( ; iterHL2 != hidden->end(); iterHL2++) {
                link2Layers((*iterHL), (*iterHL2));
            }
            iterHL++;
            // NOTE(review): this links the NEXT hidden layer to the output,
            // so with >= 2 hidden layers the first hidden layer is never
            // linked to the output and the last one gets linked to it twice
            // (here and after the loop). Looks unintended — verify against
            // the intended cascade-correlation wiring before relying on it.
            link2Layers((*iterHL), output);
        }
        link2Layers(hidden->back(), output);
    }
}
00662
// Drops the recorded per-epoch error history.
void Network::clearErrorEvolution() {
    errorEvolution->clear();
}

// Selects how learning() walks the example set each epoch
// (SEQUENTIAL / RANDOM_WITH / RANDOM_WITHOUT — see learning()).
void Network::setExampleOrder(int o) {
    exampleOrder = o;
}
00670
00671 void Network::learning(QString filename, double errorThreshold, int maxNbEpoch, double convergenceCoeff) {
00672 QFile *file;
00673 QTextStream *filetext;
00674 file = new QFile(filename);
00675 if ( !file->open( IO_ReadOnly ) ) {
00676 QMessageBox::warning(0, "Warning", "The specified file cannot be opened...");
00677 return;
00678 }
00679 filetext = new QTextStream(file);
00680 list< vector<double> > *exampleInList = readExampleFile(filetext, "in");
00681 file->close();
00682 delete file;
00683 file = new QFile(filename);
00684 if ( !file->open( IO_ReadOnly ) ) return;
00685 delete filetext;
00686 filetext = new QTextStream(file);
00687 list< vector<double> > *exampleOutList = readExampleFile(filetext, "out");
00688
00689 list< vector<double> >::iterator iterIn, iterOut;
00690 vector<double> errors;
00691 double error, averageError = 1.0;
00692 currentEpoch = 1;
00693
00694 while ((averageError > errorThreshold) && (currentEpoch <= maxNbEpoch)) {
00695 if (exampleOrder == SEQUENTIAL) {
00696 for (iterIn = exampleInList->begin(), iterOut = exampleOutList->begin();
00697 iterIn != exampleInList->end(); iterIn++, iterOut++) {
00698 error = learnExample(&(*iterIn), &(*iterOut), convergenceCoeff);
00699 errors.push_back(error);
00700 if (EPS < 100 || lt.getState() != PLAY) {
00701 if (lt.getState() == STEP) lt.setState(PAUSE);
00702 lt.uwait(1000000 / EPS);
00703 }
00704 }
00705 } else if (exampleOrder == RANDOM_WITH) {
00706 int card = exampleInList->size() ;
00707 int i = 0 ;
00708 while(i<card) {
00709 int r = rg->IRandom(0, exampleInList->size()-1);
00710 iterIn = exampleInList->begin();
00711 iterOut = exampleOutList->begin();
00712 for (int k=0; k<r; k++) { iterIn++; iterOut++; }
00713 error = learnExample(&(*iterIn), &(*iterOut), convergenceCoeff);
00714 errors.push_back(error);
00715 if (EPS < 100 || lt.getState() != PLAY) {
00716 if (lt.getState() == STEP) lt.setState(PAUSE);
00717 lt.uwait(1000000 / EPS);
00718 }
00719 i++;
00720 }
00721 } else if (exampleOrder == RANDOM_WITHOUT) {
00722 list< vector<double> > *exampleInListCopy = copyVectorList(exampleInList);
00723 list< vector<double> > *exampleOutListCopy = copyVectorList(exampleOutList);
00724 while (exampleInListCopy->size() > 0) {
00725 int r = rg->IRandom(0, exampleInListCopy->size()-1);
00726 iterIn = exampleInListCopy->begin();
00727 iterOut = exampleOutListCopy->begin();
00728 for (int k=0; k<r; k++) { iterIn++; iterOut++; }
00729 error = learnExample(&(*iterIn), &(*iterOut), convergenceCoeff);
00730 exampleInListCopy->erase(iterIn);
00731 exampleOutListCopy->erase(iterOut);
00732 errors.push_back(error);
00733 if (EPS < 100 || lt.getState() != PLAY) {
00734 if (lt.getState() == STEP) lt.setState(PAUSE);
00735 lt.uwait(1000000 / EPS);
00736 }
00737 }
00738 }
00739
00740 averageError = computeAverage(errors);
00741 errors.clear();
00742 errorEvolution->push_back(averageError);
00743 currentAverageError = averageError;
00744 currentEpoch++;
00745 }
00746 }
00747
00748 list< vector<double> >* Network::copyVectorList(list< vector<double> > *vl) {
00749 list< vector<double> > *copy = new list< vector<double> >();
00750 list< vector<double> >::iterator iter;
00751 for (iter = vl->begin(); iter != vl->end(); iter++) {
00752 vector<double> vec;
00753 for (unsigned int i=0; i<(*iter).size(); i++)
00754 vec.push_back((*iter)[i]);
00755 copy->push_back(vec);
00756 }
00757 return copy;
00758 }
00759
00760 void Network::description() {
00761 cout << "Input Layer :" << endl;
00762 input->description();
00763 cout << "Hidden Layers (" << hidden->size() << ") :" << endl;
00764 list<Layer*>::iterator iter;
00765 for (iter = hidden->begin(); iter != hidden->end(); iter++)
00766 (*iter)->description();
00767 cout << "Output Layer :" << endl;
00768 output->description();
00769 }