StPmdNeuNet.cxx
1 
2 
3 #include "StPmdNeuNet.h"
4 #include<StMessMgr.h>
5 
6 // StPmdNeuNet
7 // Feed-Forward Neural Network
8 //
10 ClassImp(StPmdNeuNet)
11 
12 Float_t Teach[200000]={0.};
13 Float_t Value[200000]={0.};
14 
15 
17 StPmdNeuNet::StPmdNeuNet(const Text_t *name, Int_t nInput, const Text_t *hidden, Int_t nOutput):TNamed(name,"Neural Network")
18 {
19  ZeroAll();
20  AllocateVW(nInput,hidden,nOutput);
21 
22  fUseBiases=1.;
23  fLearnParam=0.2;
24  fFlatSE=0.;
25  fMu=0.;
26  fLowerInitWeight=-1.;
27  fUpperInitWeight=1.;
28 
29  fNTrainEvents=10; //default value was 0
30 
31  fNTrainCycles=100; //default value was 0
32 
33  TDatime temps;
34  fRandom.SetSeed(temps.Convert());
35  gMessMgr->Info()<<"StPmdNeuNet::StPmdNeuNet: First Random Seed = "<<fRandom.GetSeed();
36  gMessMgr->Info()<<"StPmdNeuNet::StPmdNeuNet: Neural Network is created";
37 
38 }
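// Illustrative construction (sketch only; the sizes are arbitrary examples):
//
//   StPmdNeuNet net("pmdNet", 6, "10:5", 1);  // 6 inputs, hidden layers of 10 and 5 units, 1 output
//   net.SetInitParam(-1., 1.);                // bounds for the random weight initialisation
//   net.Init();                               // draw the initial weights and biases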
39 
40 
42 StPmdNeuNet::StPmdNeuNet()
43 {
44  ZeroAll();
45  fUseBiases=1.;
46  fLearnParam=0.2;
47  fFlatSE=0.;
48  fMu=0.;
49  fLowerInitWeight=-1.;
50  fUpperInitWeight=1.;
51  fNHiddL=0;
52 
53  fNTrainEvents=10;
54  fNTrainCycles=100;
55 
56  TDatime temps;
57  fRandom.SetSeed(temps.Convert());
58  gMessMgr->Info()<<"StPmdNeuNet::StPmdNeuNet: First Random Seed = "<<fRandom.GetSeed();
59 }
60 
61 
62 
63 // Destructor
64 StPmdNeuNet::~StPmdNeuNet()
65 {
66  // destructor
67  gMessMgr->Info()<<"StPmdNeuNet::~StPmdNeuNet : we are done ";
68  DeleteArray();
69  FreeVW();
70  if(fEventsList) delete [] fEventsList;
71 }
72 
73 
75 void StPmdNeuNet::ZeroAll()
76 {
77  fValues = 0;
78  fErrors = 0;
79  fBiases = 0;
80  fNUnits = 0;
81  fW = 0;
82 
83  fArrayIn = 0;
84  fArrayOut = 0;
85  fTeach = 0;
86  fEventsList = 0;
87 
88  fDW = 0;
89  fDB = 0;
90 
91 }
92 
93 
94 void StPmdNeuNet::SetHidden(const Text_t *ttext)
95 {
96  Int_t i,j;
97  TString *number;
98  Text_t text[100];
99  strncpy(text,ttext,sizeof(text)-1); text[sizeof(text)-1]='\0';  // avoid overflowing the fixed-size buffer
100 
101  fNHiddL=1;
102  for (i=0;text[i];i++)if(text[i]==':')fNHiddL++;
103  if (fNUnits) delete [] fNUnits;
104  fNUnits = new Int_t[fNHiddL+2];
105 
106  j=0;
107  for (i=1;i<=fNHiddL;i++)
108  {
109  number=new TString();
110  while(text[j]&&(text[j]!=':')){number->Append(text[j]);j++;}
111  j++;
112  sscanf(number->Data(),"%i",&fNUnits[i]);
113  delete number;
114 // printf("%i \n",fNUnits[i]);
115  }
116 
117 }
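// Example of the parsing above: a hidden string such as "10:5" gives
// fNHiddL = 2 with fNUnits[1] = 10 and fNUnits[2] = 5; the input and output
// sizes fNUnits[0] and fNUnits[fNHiddL+1] are filled afterwards by AllocateVW().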
118 
119 
120 void StPmdNeuNet::FreeVW()
121 {
122  Int_t i,l;
123 
124  // free of values
125 
126  if (fValues)
127  {
128  for (i=0;i<fNHiddL+2;i++)
129  {delete [] fValues[i]; delete [] fErrors[i]; delete [] fBiases[i];delete [] fDB[i];}
130  delete [] fValues; delete [] fErrors; delete [] fBiases;delete [] fDB;
131  fValues=0;
132  }
133 
134  // free of teaching
135 
136  if (fTeach)
137  {
138  delete [] fTeach;
139  fTeach=0;
140  }
141 
142  // free of weights
143 
144  if (fW)
145  {
146  for (i=0;i<fNHiddL+1;i++)
147  {
148  for(l=0;l<fNUnits[i];l++){delete [] fW[i][l];delete [] fDW[i][l];}
149  delete [] fW[i];delete [] fDW[i];
150  }
151  delete [] fW; delete [] fDW; fW=0; fDW=0;
152  }
153 
154  // free of units
155 
156  if (fNUnits){ delete [] fNUnits; fNUnits=0;}
157 }
158 
159 void StPmdNeuNet::AllocateVW(Int_t nInput, const Text_t *hidden, Int_t nOutput)
160 {
161  Int_t i,l;
162 
163  if(fW){
164  gMessMgr->Info()<<"StPmdNeuNet::AllocateVW: free memory first ";
165  return;
166  }
167 
168  SetHidden(hidden);
169  fNUnits[0]=nInput;
170  fNUnits[fNHiddL+1]=nOutput;
171 
172  // allocation of values
173 
174  fValues = new Float_t*[fNHiddL+2];
175  fErrors = new Double_t*[fNHiddL+2];
176  fBiases = new Double_t*[fNHiddL+2];
177  fDB = new Double_t*[fNHiddL+2];
178 
179  for (i=0;i<fNHiddL+2;i++)
180  {
181  fValues[i]=new Float_t[fNUnits[i]];
182  fErrors[i]=new Double_t[fNUnits[i]];
183  fBiases[i]=new Double_t[fNUnits[i]];
184  fDB[i]=new Double_t[fNUnits[i]];
185  }
186 
187  // allocation of teaching
188 
189  fTeach=new Float_t[fNUnits[fNHiddL+1]];
190 
191  // allocation of weights
192 
193  fW=new Double_t**[fNHiddL+1];
194  fDW=new Double_t**[fNHiddL+1];
195 
196  for (i=0;i<fNHiddL+1;i++)
197  {
198  fW[i]=new Double_t*[fNUnits[i]];
199  fDW[i]=new Double_t*[fNUnits[i]];
200  for (l=0;l<fNUnits[i];l++)
201  {
202  fW[i][l]=new Double_t[fNUnits[i+1]];
203  fDW[i][l]=new Double_t[fNUnits[i+1]];
204  }
205  }
206 
207 }
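// Layout of the arrays allocated above: fValues[i][l] is the activation of
// unit l in layer i (layer 0 = input, layer fNHiddL+1 = output), fW[i][l][c]
// is the weight from unit l of layer i to unit c of layer i+1, and fDW/fDB
// keep the previous changes for the momentum term.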
208 
209 void StPmdNeuNet::SetKernel(Int_t nInput, const Text_t *hidden, Int_t nOutput)
210 {
211  FreeVW();
212  AllocateVW(nInput,hidden,nOutput);
213 }
214 
215 
222 void StPmdNeuNet::SetLearnParam(Double_t learnParam,Double_t fse,Double_t mu)
223 {
224  fLearnParam=fabs(learnParam);
225  fFlatSE=fabs(fse);
226  fMu=fabs(mu);
227 
229  if (fLearnParam>1.0) gMessMgr->Info()<<"StPmdNeuNet::SetLearnParam: Warning : "<<fLearnParam<<" is not a usual value";
230  if (fLearnParam==0.0) gMessMgr->Info()<<"StPmdNeuNet::SetLearnParam: Warning : a value of 0 disables learning";
231  gMessMgr->Info()<<"StPmdNeuNet::SetLearnParam: Learning Parameter set to : "<<fLearnParam;
232  gMessMgr->Info()<<"StPmdNeuNet::SetLearnParam: Flat Spot elimination value set to : "<<fFlatSE;
233  gMessMgr->Info()<<"StPmdNeuNet::SetLearnParam: Momentum set to : "<<fMu;
234 }
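// Illustrative call with the class defaults (example only):
//
//   net.SetLearnParam(0.2, 0., 0.);   // learning rate 0.2, no flat-spot elimination, no momentum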
235 
236 
238 void StPmdNeuNet::SetInitParam(Float_t lowerInitWeight, Float_t upperInitWeight)
239 {
240  Float_t temp;
241 
242  fLowerInitWeight=lowerInitWeight;
243  fUpperInitWeight=upperInitWeight;
244  if (fLowerInitWeight>fUpperInitWeight)
245  {
246  temp=fUpperInitWeight;
247  fUpperInitWeight=fLowerInitWeight;
248  fLowerInitWeight=temp;
249  }
250  if (fLowerInitWeight==fUpperInitWeight)
251  gMessMgr->Info()<<"StPmdNeuNet::SetInitParam: Warning : the weights initialisation bounds are equal ";
252  gMessMgr->Info()<<"StPmdNeuNet::SetInitParam: Init Parameters set to ";
253  gMessMgr->Info()<<"StPmdNeuNet::SetInitParam: --> Lower bound = "<<fLowerInitWeight;
254  gMessMgr->Info()<<"StPmdNeuNet::SetInitParam: --> Upper bound = "<<fUpperInitWeight;
255 
256 }
257 
258 
259 Float_t StPmdNeuNet::Alea()
260 {
261  return fLowerInitWeight+fRandom.Rndm()*(fUpperInitWeight-fLowerInitWeight);
262 }
263 
264 
265 
271 void StPmdNeuNet::Init()
272 {
273  Int_t i,l,c;
274 
275  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::Init: allocate memory first";return;}
276 
277  // init of weights
278 
279  for (i=0;i<fNHiddL+1;i++)
280  for (l=0;l<fNUnits[i];l++)
281  for (c=0;c<fNUnits[i+1];c++) fW[i][l][c]=(Double_t)Alea();
282 
283  for(i=0;i<fNHiddL+1;i++)for(l=0;l<fNUnits[i];l++)for(c=0;c<fNUnits[i+1];c++)
284  fDW[i][l][c]=0.;
285 
286  // init of biases
287 
288  for (i=1;i<fNHiddL+2;i++)
289  for (l=0;l<fNUnits[i];l++) fBiases[i][l]=(Double_t)(Alea())*fUseBiases;
290 
291  for(i=1;i<fNHiddL+2;i++)for(l=0;l<fNUnits[i];l++)fDB[i][l]=0.;
292 
293 
294  fNTrainCycles=0;
295  gMessMgr->Info()<<"StPmdNeuNet::Init: Initialisation done";
296 }
297 
298 
300 void StPmdNeuNet::PrintS()
301 {
302  Int_t i,l,c;
303 
304  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::PrintS: no unit ";return;}
305 
306  gMessMgr->Info()<<"StPmdNeuNet::PrintS: +++++++++ Neural Network ++++++++++++"<<GetName();
307  for(i=0;i<fNHiddL+2;i++)gMessMgr->Info()<<"StPmdNeuNet::PrintS: Layer "<<i<<" contains "<<fNUnits[i]<<" units";
308 
309  if(fUseBiases)gMessMgr->Info()<<"StPmdNeuNet::PrintS: >>>>>>> Biases USED";
310  else gMessMgr->Info()<<"StPmdNeuNet::PrintS: >>>>>>> Biases DUMMY";
311 
312  gMessMgr->Info()<<"StPmdNeuNet::PrintS: ---------- Biases ---------- ";
313  Int_t maxl=0;
314  for(i=0;i<fNHiddL+2;i++)if(fNUnits[i]>=maxl)maxl=fNUnits[i];
315  for(i=0;i<fNHiddL+2;i++)gMessMgr->Info()<<" | "<<i;
316  for(i=0;i<fNHiddL+2;i++)gMessMgr->Info()<<"--------|-";
317  for(l=0;l<maxl;l++)
318  {
319  for(i=0;i<fNHiddL+2;i++)
320  if(l<fNUnits[i])gMessMgr->Info()<<"StPmdNeuNet::PrintS: | "<<fBiases[i][l];
321  else gMessMgr->Info()<<" | ";
322  }
323 
324 
325  gMessMgr->Info()<<"StPmdNeuNet::PrintS: ---------- Weights ----------- ";
326  for(i=0;i<fNHiddL+1;i++)
327  {
328  gMessMgr->Info()<<"StPmdNeuNet::PrintS: From "<<i<<" to " <<i+1;
329  gMessMgr->Info()<<"StPmdNeuNet::PrintS: "<<i;for(l=0;l<fNUnits[i];l++)gMessMgr->Info()<<" |"<<l;
330  gMessMgr->Info()<<"StPmdNeuNet::PrintS: ===|";for(l=0;l<fNUnits[i];l++)gMessMgr->Info()<<"-------";
331  gMessMgr->Info()<<"StPmdNeuNet::PrintS: |"<<i+1; for(l=0;l<fNUnits[i];l++)gMessMgr->Info()<<"-------";
332  for(c=0;c<fNUnits[i+1];c++)
333  {
334  gMessMgr->Info()<<"StPmdNeuNet::PrintS: |"<<c;
335  for(l=0;l<fNUnits[i];l++)gMessMgr->Info()<<"|"<<fW[i][l][c];
336  }
337  }
338 
339  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Learning parameter = "<<fLearnParam;
340  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Flat Spot elimination value = "<<fFlatSE;
341  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Momentum = "<<fMu;
342  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Lower initialisation weight = "<<fLowerInitWeight;
343  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Upper initialisation weight = "<<fUpperInitWeight;
344  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Number of events for training = "<<fNTrainEvents;
345  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Number of events for validation = "<<fNValidEvents;
346  gMessMgr->Info()<<"StPmdNeuNet::PrintS: Number of cycles done = "<<fNTrainCycles;
347  gMessMgr->Info()<<"StPmdNeuNet::PrintS: +++++++++++++++++++++++++++++++++++++++++++++++";
348 
349 }
350 
355 void StPmdNeuNet::Forward()
356 {
357  Int_t i,l,c;
358  Double_t sum;
359  // cout<<"Valid forward called "<<endl;
360  if(!fW){
361  gMessMgr->Info()<<"StPmdNeuNet::Forward no unit !";
362  return;
363  }
364 
365  for (i=0;i<fNHiddL+1;i++)
366  for (c=0;c<fNUnits[i+1];c++)
367  {
368  sum=0.;
369  for(l=0;l<fNUnits[i];l++)sum+=fW[i][l][c]*(Double_t)fValues[i][l];
370  fValues[i+1][c]=(Float_t)Sigmoide(sum+fBiases[i+1][c]*fUseBiases);
371  }
372 }
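// The loop above computes, for each unit c of layer i+1,
//   fValues[i+1][c] = Sigmoide( sum_l fW[i][l][c]*fValues[i][l] + fBiases[i+1][c]*fUseBiases )
// i.e. a standard sigmoid feed-forward pass with optional biases.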
373 
374 
375 
377 void StPmdNeuNet::LearnBackward()
378 {
379  if(fNTrainEvents<1){gMessMgr->Info()<<"StPmdNeuNet::LearnBackward: No event to train !!!";return;}
380  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::LearnBackward: no unit !";return;}
381 
382  Int_t i,l,c;
383  Double_t delta;
384 
385  // weights
386 
387  for (i=0;i<fNHiddL+1;i++)
388  for (l=0;l<fNUnits[i];l++)
389  for(c=0;c<fNUnits[i+1];c++)
390  {
391  delta=fLearnParam*fErrors[i+1][c]*(Double_t)fValues[i][l]+fMu*fDW[i][l][c];
392  fW[i][l][c]+=delta;
393  fDW[i][l][c]=delta;
394  }
395  // biases
396  if(((Bool_t)fUseBiases))
397  {
398  for (i=1;i<fNHiddL+2;i++)
399  for (l=0;l<fNUnits[i];l++)
400  {
401  delta=fLearnParam*fErrors[i][l]+fMu*fDB[i][l];
402  fBiases[i][l]+=delta;
403  fDB[i][l]=delta;
404  }
405  }
406 }
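// The update applied above is gradient descent with momentum:
//   dW[i][l][c] = fLearnParam * fErrors[i+1][c] * fValues[i][l] + fMu * previous dW[i][l][c]
//   dB[i][l]    = fLearnParam * fErrors[i][l]                   + fMu * previous dB[i][l]
// and the new deltas are stored in fDW/fDB for the next step.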
407 
408 
413 Double_t StPmdNeuNet::Error()
414 {
415 
416  Int_t i,l,c;
417  Double_t sum,error=0,errorOneUnit;
418  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::Error: no unit !";return 0;}
419 
420  // Error on Output Units
421 
422  for(l=0;l<fNUnits[fNHiddL+1];l++)
423  {
424  errorOneUnit=(Double_t)(fTeach[l]-fValues[fNHiddL+1][l]);
425  // cout<<"teach "<<fTeach[l]<<"Value "<<fValues[fNHiddL+1][l]<<endl;
426 
427  error+=fabs(errorOneUnit);
428  fErrors[fNHiddL+1][l]=errorOneUnit*(SigPrim(fValues[fNHiddL+1][l])+fFlatSE);
429  }
430  error=error/(Double_t)fNUnits[fNHiddL+1];
431 
432  // Error on Hidden Units
433 
434  for(i=fNHiddL;i>=1;i--)   // walk back through all hidden layers, not only when fNHiddL==1
435  {
436  for(l=0;l<fNUnits[i];l++)
437  {
438  sum=0.;
439  for(c=0;c<fNUnits[i+1];c++) sum+=fW[i][l][c]*fErrors[i+1][c];
440  fErrors[i][l]=sum*(SigPrim((Double_t)fValues[i][l])+fFlatSE);
441  }
442  }
443 
444  return error;
445 }
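// Error terms set above:
//   output layer : fErrors[l] = (fTeach[l]-fValues[l]) * ( SigPrim(fValues[l]) + fFlatSE )
//   hidden layers: fErrors[i][l] = ( sum_c fW[i][l][c]*fErrors[i+1][c] ) * ( SigPrim(fValues[i][l]) + fFlatSE )
// The returned value is the mean absolute error on the output units only.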
446 
447 
453 Double_t StPmdNeuNet::ErrorO()
454 {
455 
456 
457 // cout<<"Error0 called "<<endl;
458  Int_t l;
459  Double_t error=0;
460  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::ErrorO: no unit !";return 0;}
461  for(l=0;l<fNUnits[fNHiddL+1];l++)
462  error+=fabs((Double_t)(fTeach[l]-fValues[fNHiddL+1][l]));
463  error=error/(Double_t)fNUnits[fNHiddL+1];
464  return error;
465 
466 }
467 
468 
477 Double_t StPmdNeuNet::TrainOneCycle()
478 {
479  if(fNTrainEvents<1){gMessMgr->Info()<<"StPmdNeuNet::TrainOneCycle: No event to train !!!";return 0.;}
480  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::TrainOneCycle: no unit !";return 0.;}
481 
482 
483  Int_t i;
484  Double_t error=0.;
485 
486  for(i=0;i<fNTrainEvents;i++)
487  {
488  GetArrayEvt(fEventsList[i]);
489  Forward();
490  for(Int_t l=0;l<fNUnits[fNHiddL+1];l++)
491  {
492  Teach[i]=fTeach[l];
493  Value[i]=fValues[fNHiddL+1][l];
494 
495  // cout<<"evt "<<i<<"teach **"<<fTeach[l]<<"favle "<<fValues[fNHiddL+1][l]<<endl;
496  }
497 
498  error+=Error();
499  LearnBackward();
500 
501 
502  }
503 
504  fNTrainCycles++;
505  error=error/(Double_t)fNTrainEvents;
506  gMessMgr->Info()<<"StPmdNeuNet::TrainOneCycle: cycle "<<fNTrainCycles<<" : E_t = "<<error;
507 
508  return error;
509 }
510 
511 
518 Double_t StPmdNeuNet::Valid()
519 {
520  if(fNValidEvents<1) return 0.;
521 
522 
523  // we will now pass all the validation events through the kernel, and
524  // compute the mean error on output
525  Double_t error=0.;
526 
527  for (Int_t j=0;j<fNValidEvents;j++)
528  {
529  error+=GoThrough(); // forward propagation and error on one event
530  }
531  error=error/(Double_t)fNValidEvents; // mean
532  return error;
533 }
534 
535 
540 void StPmdNeuNet::TrainNCycles(Int_t nCycles)
541 {
542  //sub if(!conte){gMessMgr->Info()<<"no controller !";return;}
543  Float_t errt,errv;
544  for(Int_t i=0;i<nCycles;i++)
545  {
546  Mix();
547  errt=(Float_t)TrainOneCycle();
548  errv=(Float_t)Valid();
549  gMessMgr->Info()<<"StPmdNeuNet::TrainNCycles: cycle "<<fNTrainCycles<<" > train : "<<errt;
550  if(fNValidEvents)gMessMgr->Info()<<"StPmdNeuNet::TrainNCycles: and valid : "<<errv;
551  else gMessMgr->Info()<<(" ");
552 
553  }
554 
555 }
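// Illustrative training sequence (sketch only; nEvents and the data-filling
// step are placeholders):
//
//   net.SetArraySize(nEvents);   // allocate the internal training arrays
//   // ... fill the input and teaching arrays for each event ...
//   net.Init();                  // random initialisation of weights and biases
//   net.TrainNCycles(50);        // each cycle: Mix() + TrainOneCycle() + Valid()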
556 
557 
564 void StPmdNeuNet::Export(const Text_t *fileName)
565 {
566  Int_t i,l,c;
567 
568  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::Export: no unit !";return;}
569 
570  FILE *file=0;
571  file = fopen(fileName,"w");
572  if ( ! file){
573  gMessMgr->Info()<<"StPmdNeuNet::Export: ERROR Cannot open for write "<<fileName;
574  return;
575  }
576 
577  fprintf(file,"%8i\n",fNTrainEvents);
578  for(l=0;l<fNTrainEvents;l++)fprintf(file,"%8.4f %8.4f\n",Teach[l],Value[l]);
579 
581  m_DiscMaker->mNNoutput->Fill(Value[l]);
582 
583  fprintf(file,"%3i\n",fNHiddL);
584  for(i=0;i<fNHiddL+2;i++)fprintf(file,"%3i\n",fNUnits[i]);
585 
586  for(i=0;i<fNHiddL+2;i++)
587  for(l=0;l<fNUnits[i];l++)fprintf(file,"%8.4f\n",fBiases[i][l]);
588 
589  for(i=0;i<fNHiddL+1;i++)
590  for(l=0;l<fNUnits[i];l++)
591  for(c=0;c<fNUnits[i+1];c++)fprintf(file,"%8.4f\n",fW[i][l][c]);
592 
593  fprintf(file,"%5i\n",fNTrainCycles);
594  fprintf(file,"%2.0f\n",fUseBiases);
595 
596  fclose(file);
597 }
598 
599 
606 void StPmdNeuNet::Import(const Text_t *fileName)
607 {
608  Int_t i,l,c,newI,newHL,newO;
609  Text_t hidden[100],piece[5];
610  FILE *file=0;
611  file = fopen(fileName,"r");
612 
613  if ( ! file){
614  gMessMgr->Info()<<"StPmdNeuNet::Import: ERROR Cannot open for read "<<fileName;
615  return;
616  }
617 
618  fscanf(file,"%3i",&newHL);
619  fscanf(file,"%3i",&newI);
620  strcpy(hidden,"");
621  for(i=1;i<newHL;i++)
622  {fscanf(file,"%4s",piece);strcat(hidden,piece);strcat(hidden,":");}
623  fscanf(file,"%4s",piece);strcat(hidden,piece);
624  fscanf(file,"%3i",&newO);
625 
626  gMessMgr->Info()<<"StPmdNeuNet::Import: New NN set to : "<<newI<<" "<<hidden<<" "<<newO;
627  FreeVW();
628 
629  gMessMgr->Info()<<"StPmdNeuNet::Import: Allocating";
630  AllocateVW(newI,hidden,newO);
631 
632  gMessMgr->Info()<<"StPmdNeuNet::Import: Filling fDB+fscanf()";
633  Float_t tmpfl;
634  for(i=0;i<fNHiddL+2;i++)
635  for(l=0;l<fNUnits[i];l++){fDB[i][l]=0.;fscanf(file,"%f",&tmpfl);*(fBiases[i]+l)=(Double_t)tmpfl;}
636 
637  for(i=0;i<fNHiddL+1;i++)
638  for(l=0;l<fNUnits[i];l++)
639  for(c=0;c<fNUnits[i+1];c++){
640  fDW[i][l][c]=0.;fscanf(file,"%f",&tmpfl);*(fW[i][l]+c)=(Double_t)tmpfl;
641  // cout<<"Nhidd "<<i<<"Nunit "<<l<<"unit_next "<<c<<"wei "<<fW[i][l][c]<<endl;
642  }
643 
644 
645  fscanf(file,"%5i",&fNTrainCycles);
646  fscanf(file,"%f",&tmpfl);fUseBiases=(Double_t)tmpfl;
647  fclose(file);
648  gMessMgr->Info()<<"StPmdNeuNet::Import: Done";
649 }
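// File layout read above: number of hidden layers, then the layer sizes
// (input, hidden, output), the biases, the weights, the number of training
// cycles done and the bias flag.  Note that Export() additionally writes
// fNTrainEvents and the per-event Teach/Value pairs at the top of its file,
// which Import() does not read.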
650 
651 
656 void StPmdNeuNet::Mix()
657 {
658  Int_t i,i1,i2;
659  Int_t temp;
660  for (i=0;i<3*fNTrainEvents;i++)
661  {
662  i1=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
663  i2=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
664  temp=fEventsList[i1];
665  fEventsList[i1]=fEventsList[i2];
666  fEventsList[i2]=temp;
667  }
668 
669  // for (i=0;i<fNTrainEvents;i++)printf("%i \n",fEventsList[i]);
670  // printf("Mixed ... ");
671 }
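// The loop above shuffles fEventsList with 3*fNTrainEvents random
// transpositions, so each training cycle processes the events in a new order.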
672 
673 
674 void StPmdNeuNet::SetArraySize(Int_t size)
675 {
676  DeleteArray();
677  if (fEventsList) { delete [] fEventsList; fEventsList=0; }
678  if(!size)return;
679  Int_t i;
680  fNTrainEvents=size;
681  fArrayIn = new Float_t*[fNTrainEvents];
682  for (i=0;i<fNTrainEvents;i++) fArrayIn[i] = new Float_t[fNUnits[0]];
683 
684 
685  fArrayOut = new Float_t*[fNTrainEvents];
686  for (i=0;i<fNTrainEvents;i++) fArrayOut[i] = new Float_t[fNUnits[fNHiddL+1]];
687 
688  fEventsList = new Int_t[fNTrainEvents];
689  for (i=0;i<fNTrainEvents;i++)fEventsList[i]=i;
690 }
691 
692 void StPmdNeuNet::DeleteArray()
693 {
694  Int_t i;
695 
696  if(fArrayIn)
697  {
698  for (i=0;i<fNTrainEvents;i++)delete [] fArrayIn[i];
699  delete [] fArrayIn;
700  fArrayIn=0;
701  }
702 
703  if(fArrayOut)
704  {
705  for (i=0;i<fNTrainEvents;i++)delete [] fArrayOut[i];
706  delete [] fArrayOut;
707  fArrayOut=0;
708  }
709 
710 }
711 
712 /*
713 void StPmdNeuNet::SetTrainTree(TNNTree *t)
714 {
715 // method to associate a TNNTree to the kernel :
716 // the events of the tree will be transferred in the internal
717 // array of the kernel.
718 
719  if(!t){printf("no tree !\n");return;}
720  Int_t i;
721 
722 //allocation
723 
724  SetArraySize((Int_t)(t->GetTree()->GetEntries()));
725  printf(" nbr evts for training : %i \n",GetNTrainEvents());
726 
727 // loop
728 // the methods GetInputAdr() and GetTeachAdr()
729 // return the addresses of arrays in the kernel, and the method
730 // GetEvent fills these addresses with event i of the train tree t
731 // the method Fill(i) translates the filled arrays in the internal array
732 
733  for (i=0;i<(Int_t)(t->GetTree()->GetEntries());i++)
734  {
735  t->GetEvent(GetInputAdr(),GetTeachAdr(),i);
736  Fill(i);
737  }
738 
739 }
740 
741 void StPmdNeuNet::SetValidTree(TNNTree *t)
742 {
743 // method to associate a TNNTree to the kernel :
744 // a link will be done between the tree and the kernel.
745 // it is not necessary to keep these events in the kernel
746 
747  if(!t){printf("no tree !\n");return;}
748  fValidTree=t;
749  fNValidEvents=(Int_t)(t->GetTree()->GetEntries());
750 }
751 
752 */
753 
754 void StPmdNeuNet::FillArray(Int_t iev,Int_t iunit,Float_t value)
755 {
756  //cout<<"inside fillarray**"<<iev<<" "<<iunit<<" "<<value<<endl;
757 
758  fArrayIn[iev][iunit]=value;
759 }
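// Illustrative filling of the input array (sketch only: nEvents, nInputs and
// inputData are placeholders, not members of this class):
//
//   net.SetArraySize(nEvents);
//   for (Int_t iev = 0; iev < nEvents; iev++)
//     for (Int_t iu = 0; iu < nInputs; iu++)
//       net.FillArray(iev, iu, inputData[iev][iu]);
//
// The teaching outputs are loaded separately, e.g. through GetTeachAdr() and
// Fill() as in the commented-out SetTrainTree() above.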
760 
761 
762 
771 Double_t StPmdNeuNet::ApplyWeights(Float_t *Teach,Float_t *Value)
772 {
773  if(fNTrainEvents<1){gMessMgr->Info()<<"StPmdNeuNet::ApplyWeights: No event to train !!!";return 0.;}
774  if(!fW){gMessMgr->Info()<<"StPmdNeuNet::ApplyWeights: no unit !";return 0.;}
775  FILE *file1;
776  file1=fopen("testout","w");
777 
778  Int_t i;
779  Double_t error=0.;
780 
781  for(i=0;i<fNTrainEvents;i++)
782  {
783  GetArrayEvt(fEventsList[i]);
784  Forward();
785  for(Int_t l=0;l<fNUnits[fNHiddL+1];l++)
786  {
787  Teach[i]=fTeach[l];
788  Value[i]=fValues[fNHiddL+1][l];
789 
790 
791  // cout<<"evt "<<i<<"teach **"<<fTeach[l]<<"favle "<<fValues[fNHiddL+1][l]<<endl;
792  }
793  fprintf(file1,"%d %8.4f %8.4f\n",i,Teach[i],Value[i]);
794 
795  error+=Error();
796  // LearnBackward();
797 
798 
799  }
800 
801  fNTrainCycles++;
802  error=error/(Double_t)fNTrainEvents;
803  // printf("cycle %i : E_t = %6.4f ",fNTrainCycles,error);
804  fclose(file1);
805  return error;
806 }
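// Illustrative use of ApplyWeights (sketch only; the file name and array size
// are examples): after the weights have been imported, the stored events are
// passed through the network once and the teaching values and outputs are
// returned in the two arrays (and written to the "testout" file).
//
//   Float_t teach[200000], value[200000];
//   net.Import("exportNN.dat");
//   Double_t meanError = net.ApplyWeights(teach, value);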