TORCS  1.3.9
The Open Racing Car Simulator
ANN.cpp
Go to the documentation of this file.
1 /* -*- Mode: C++; -*- */
2 /* VER: $Id$ */
3 // copyright (c) 2004 by Christos Dimitrakakis <dimitrak@idiap.ch>
4 /***************************************************************************
5  * *
6  * This program is free software; you can redistribute it and/or modify *
7  * it under the terms of the GNU General Public License as published by *
8  * the Free Software Foundation; either version 2 of the License, or *
9  * (at your option) any later version. *
10  * *
11  ***************************************************************************/
12 
13 #include <cstring>
14 #include <learning/ANN.h>
15 #include <learning/string_utils.h>
16 #include <learning/Distribution.h>
17 
18 
19 #undef ANN_DBUG
20 
21 //==========================================================
22 // NewANN
23 //----------------------------------------------------------
25 ANN *NewANN(int n_inputs, int n_outputs)
26 {
27  ANN *ann = NULL;
28 
29  if (!(ann = AllocM(ANN, 1))) {
30  Serror("Could not allocate ANN\n");
31  return NULL;
32  }
33  ann->x = NULL;
34  ann->y = NULL;
35  ann->t = NULL;
36  ann->d = NULL;
37  ann->error = NULL;
38  ann->c = NULL;
39  ann->a = 0.1f;
40  ann->lambda = 0.9f;
41  ann->zeta = 0.9f;
42  ann->n_inputs = n_inputs;
43  ann->n_outputs = n_outputs;
44  ann->batch_mode = false;
45 
46 
47  /* outputs are not allocated */
48  //logmsg ("Creating ANN with %d inputs and %d outputs\n", n_inputs, n_outputs);
49  if (!(ann->error = AllocM(real, n_outputs))) {
50  Serror("Could not allocate errors\n");
51  DeleteANN(ann);
52  return NULL;
53  }
54 
55  if (!(ann->d = AllocM(real, n_outputs))) {
56  Serror("Could not allocate derivatives\n");
57  DeleteANN(ann);
58  return NULL;
59  }
60 
61  if (!(ann->c = List())) {
62  Serror("Could not allocate list\n");
63  DeleteANN(ann);
64  return NULL;
65  }
66 #ifdef ANN_DBUG
67  message("Creating ANN with %d inputs and %d outputs", n_inputs,
68  n_outputs);
69 #endif
70  return ann;
71 }
72 
73 //==========================================================
74 // DeleteANN
75 //----------------------------------------------------------
77 int DeleteANN(ANN * ann)
78 {
79  if (!ann) {
80  Swarning("Attempting to delete NULL ANN\n");
81  return DEC_ARG_INVALID;
82  }
83 
84  if (ann->error) {
85  FreeM(ann->error);
86  }
87 
88  //if (ann->x) {
89  // FreeM (ann->x);
90  //}
91 
92  if (ann->d) {
93  FreeM(ann->d);
94  }
95 
96  /* We must clear all allocations in the list */
97  if (ann->c) {
98  ClearList(ann->c);
99  ann->c = NULL;
100  }
101 
102  FreeM(ann);
103  return 0;
104 }
105 
106 
107 //==========================================================
108 // ANN_AddHiddenLayer()
109 //----------------------------------------------------------
111 int ANN_AddHiddenLayer(ANN * ann, int n_nodes)
112 {
113 #ifdef ANN_DBUG
114  message("Adding Hidden layer with %d nodes", n_nodes);
115 #endif
116 
117  LISTITEM *item = LastListItem(ann->c);
118  if (item) {
119  Layer *p = (Layer *) item->obj;
120  ANN_AddLayer(ann, p->n_outputs, n_nodes, p->y);
121  } else {
122  ANN_AddLayer(ann, ann->n_inputs, n_nodes, ann->x);
123  }
124  return 0;
125 }
126 
127 //==========================================================
128 // ANN_AddRBFHiddenLayer()
129 //----------------------------------------------------------
131 int ANN_AddRBFHiddenLayer(ANN * ann, int n_nodes)
132 {
133 #ifdef ANN_DBUG
134  message("Adding Hidden layer with %d nodes", n_nodes);
135 #endif
136  LISTITEM *item = LastListItem(ann->c);
137  if (item) {
138  Layer *p = (Layer *) item->obj;
139  ANN_AddRBFLayer(ann, p->n_outputs, n_nodes, p->y);
140  } else {
141  ANN_AddRBFLayer(ann, ann->n_inputs, n_nodes, ann->x);
142  }
143  return 0;
144 }
145 
146 
147 
148 //==========================================================
149 // ANN_AddLayer()
150 //----------------------------------------------------------
152 Layer *ANN_AddLayer(ANN * ann, int n_inputs, int n_outputs, real * x)
153 {
154  Layer *l = NULL;
155  if ((x == NULL) && (ann->c->n)) {
156  Swarning
157  ("Layer connects to null but layer list is not empty\n");
158  }
159 
160 
161  if (!(l = AllocM(Layer, 1))) {
162  Serror("Could not allocate layer structure\n");
163  return NULL;
164  }
165 
166  assert(n_inputs > 0);
167  assert(n_outputs > 0);
168 
169  l->n_inputs = n_inputs;
170  l->n_outputs = n_outputs;
171  l->x = x;
172  l->a = ann->a;
173  l->zeta = ann->zeta;
174  l->lambda = ann->lambda;
177  l->f = &htan;
178  l->f_d = &htan_d;
179  //l->f = &dtan;
180  // l->f_d = &dtan_d;
181  l->batch_mode = false;
182  if (!(l->y = AllocM(real, n_outputs))) {
183  Serror("Could not allocate layer outputs\n");
184  ANN_FreeLayer(l);
185  return NULL;
186  }
187  int i;
188  for (i=0; i<n_outputs; i++) {
189  l->y[i] = 0.0;
190  }
191 
192  if (!(l->z = AllocM(real, n_outputs))) {
193  Serror("Could not allocate layer activations\n");
194  ANN_FreeLayer(l);
195  return NULL;
196  }
197  for (i=0; i<n_outputs; i++) {
198  l->z[i] = 0.0;
199  }
200  if (!(l->d = AllocM(real, n_inputs + 1 /*bias */ ))) {
201  Serror("Could not allocate layer outputs\n");
202  ANN_FreeLayer(l);
203  return NULL;
204  }
205  for (i=0; i<n_inputs+1; i++) {
206  l->d[i] = 0.0;
207  }
208 
209  if (!
210  (l->c =
211  AllocM(Connection, (n_inputs + 1 /*bias */ ) * n_outputs))) {
212  Serror("Could not allocate connections\n");
213  ANN_FreeLayer(l);
214  return NULL;
215  }
216 
217  l->rbf = NULL;
218 
219  real bound = 2.0f / sqrt((real) n_inputs);
220  for (i = 0; i < n_inputs + 1 /*bias */ ; i++) {
221  Connection *c = &l->c[i * n_outputs];
222  for (int j = 0; j < n_outputs; j++) {
223  c->w = (urandom() - 0.5f)* bound;;
224  c->c = 1;
225  c->e = 0.0f;
226  c->dw = 0.0f;
227  c->v = 1.0;
228  c++;
229  }
230  }
231  ListAppend(ann->c, (void *) l, &ANN_FreeLayer);
232  return l;
233 }
234 
235 
236 //==========================================================
237 // ANN_AddRBFLayer()
238 //----------------------------------------------------------
240 Layer *ANN_AddRBFLayer(ANN * ann, int n_inputs, int n_outputs, real * x)
241 {
242  Layer *l = NULL;
243  if ((x == NULL) && (ann->c->n)) {
244  Swarning
245  ("Layer connects to null and layer list not empty\n");
246  }
247 
248  if (!(l = AllocM(Layer, 1))) {
249  Serror("Could not allocate layer structure\n");
250  return NULL;
251  }
252 
253  assert(n_inputs > 0);
254  assert(n_outputs > 0);
255 
256  l->n_inputs = n_inputs;
257  l->n_outputs = n_outputs;
258  l->x = x;
259  l->a = ann->a;
262  l->f = &Exp;
263  l->f_d = &Exp_d;
264  l->batch_mode = false;
265 
266  if (!(l->y = AllocM(real, n_outputs))) {
267  Serror("Could not allocate layer outputs\n");
268  ANN_FreeLayer(l);
269  return NULL;
270  }
271 
272  if (!(l->z = AllocM(real, n_outputs))) {
273  Serror("Could not allocate layer activations\n");
274  ANN_FreeLayer(l);
275  return NULL;
276  }
277 
278  if (!(l->d = AllocM(real, n_inputs + 1 /*bias */ ))) {
279  Serror("Could not allocate layer outputs\n");
280  ANN_FreeLayer(l);
281  return NULL;
282  }
283 
284  if (!
285  (l->rbf =
287  (n_inputs + 1 /*bias */ ) * n_outputs))) {
288  Serror("Could not allocate connections\n");
289  ANN_FreeLayer(l);
290  return NULL;
291  }
292 
293  l->c = NULL;
294 
295  real bound = 2.0f / sqrt((real) n_inputs);
296  for (int i = 0; i < n_inputs + 1 /*bias */ ; i++) {
297  RBFConnection *c = &l->rbf[i * n_outputs];
298  for (int j = 0; j < n_outputs; j++) {
299  c->w = (urandom() - 0.5f) * bound;;
300  c->m = (urandom() - 0.5f) * 2.0f;
301  c++;
302  }
303  }
304  ListAppend(ann->c, (void *) l, &ANN_FreeLayer);
305  return l;
306 }
307 
308 
309 //==========================================================
310 // ANN_FreeLayer()
311 //----------------------------------------------------------
// Type-erased destructor hook required by ListAppend(); forwards to the
// Layer* overload, which does the actual freeing.
void ANN_FreeLayer(void *l)
{
	ANN_FreeLayer((Layer *) l);
}
317 
318 //==========================================================
319 // ANN_FreeLayer()
320 //----------------------------------------------------------
323 {
324  FreeM(l->y);
325  if (l->z) {
326  FreeM(l->z);
327  }
328  if (l->c) {
329  FreeM(l->c);
330  }
331  if (l->rbf) {
332  FreeM(l->rbf);
333  }
334  FreeM(l->d);
335  FreeM(l);
336 
337 }
338 
339 //==========================================================
340 // ANN_Init()
341 //----------------------------------------------------------
343 
346 int ANN_Init(ANN * ann)
347 {
348  // Add output layer
349  LISTITEM *item = LastListItem(ann->c);
350  Layer *l = NULL;
351 #ifdef ANN_DBUG
352  message("Initialising");
353 #endif
354  if (item) {
355  Layer *p = (Layer *) item->obj;
356  l = ANN_AddLayer(ann, p->n_outputs, ann->n_outputs, p->y);
357  } else {
358  l = ANN_AddLayer(ann, ann->n_inputs, ann->n_outputs,
359  ann->x);
360  }
361  if (l == NULL) {
362  Serror("Could not create final layer\n");
363  DeleteANN(ann);
364  return -1;
365  }
366  ann->y = l->y;
367  l->f = &linear;
368  l->f_d = &linear_d;
369  // ann->t = l->t;
370  return 0;
371 }
372 
373 
374 
375 //==========================================================
376 // ANN_Reset()
377 //----------------------------------------------------------
379 void ANN_Reset(ANN * ann)
380 {
381  LISTITEM *p = FirstListItem(ann->c);
382 
383  while (p) {
384  Layer *l = (Layer *) p->obj;
385  for (int i = 0; i < l->n_inputs + 1 /* bias */; i++) {
386  Connection *c = &l->c[i * l->n_outputs];
387  for (int j = 0; j < l->n_outputs; j++) {
388  c->e = 0.0;
389  c->dw = 0.0;
390  c++;
391  }
392  }
393  p = NextListItem (ann->c);
394  }
395 }
396 
397 
398 //==========================================================
399 // ANN_Input()
400 //----------------------------------------------------------
402 
406 real ANN_Input(ANN * ann, real * x)
407 {
408  LISTITEM *p = FirstListItem(ann->c);
409  Layer *first_layer = (Layer *) p->obj;
410  ann->x = x;
411  first_layer->x = x; // Setup input of first layer
412  // printf ("II: %f\n", ann->x[0]);
413  while (p) {
414  Layer *current_layer = (Layer *) p->obj;
415  // printf ("\tIII: %f\n", current_layer->x[0]);
416  current_layer->forward(current_layer, false);
417  p = NextListItem(ann->c);
418  }
419  return 0.0f;
420 }
421 
422 //==========================================================
423 // ANN_StochasticInput()
424 //----------------------------------------------------------
430 {
431  LISTITEM *p = FirstListItem(ann->c);
432  Layer *first_layer = (Layer *) p->obj;
433  ann->x = x;
434  first_layer->x = x; // Setup input of first layer
435  // printf ("II: %f\n", ann->x[0]);
436  while (p) {
437  Layer *current_layer = (Layer *) p->obj;
438  // printf ("\tIII: %f\n", current_layer->x[0]);
439  current_layer->forward(current_layer, true);
440  p = NextListItem(ann->c);
441  }
442  return 0.0f;
443 }
444 
445 //==========================================================
446 // ANN_CalculateLayerOutputs()
447 //----------------------------------------------------------
449 void ANN_CalculateLayerOutputs(Layer * current_layer, bool stochastic)
450 {
451  int i, j;
452  int n_inputs = current_layer->n_inputs;
453  int n_outputs = current_layer->n_outputs;
454  real *x = current_layer->x;
455  real *y = current_layer->y;
456  real *z = current_layer->z;
457  Connection *c;
458 
459  for (j = 0; j < n_outputs; j++) {
460  z[j] = 0.0f;
461  }
462  c = current_layer->c;
463  if (stochastic) {
464  for (i = 0; i < n_inputs; i++) {
465  for (j = 0; j < n_outputs; j++) {
466  // using uniform bounded..
467  real w = c->w + (urandom()-0.5f)*c->v ;
468  z[j] += x[i] * w;
469  c++;
470  }
471  }
472 
473  // bias
474  for (j = 0; j < n_outputs; j++) {
475  real w = c->w + (urandom()-0.5f)*c->v ;
476  z[j] += w;
477  c++;
478  }
479  } else {
480  for (i = 0; i < n_inputs; i++) {
481  for (j = 0; j < n_outputs; j++) {
482  z[j] += x[i] * c->w;
483  c++;
484  }
485  }
486 
487  // bias
488  for (j = 0; j < n_outputs; j++) {
489  z[j] += c->w;
490  c++;
491  }
492  }
493 
494  for (j = 0; j < n_outputs; j++) {
495  y[j] = current_layer->f(z[j]);
496  }
497 }
498 
499 //==========================================================
500 // ANN_RBFCalculateLayerOutputs()
501 //----------------------------------------------------------
503 void ANN_RBFCalculateLayerOutputs(Layer * current_layer, bool stochastic)
504 {
505  int i, j;
506  int n_inputs = current_layer->n_inputs;
507  int n_outputs = current_layer->n_outputs;
508  real *x = current_layer->x;
509  real *y = current_layer->y;
510  real *z = current_layer->z;
511  RBFConnection *c;
512 
513 
514  for (j = 0; j < n_outputs; j++) {
515  z[j] = 0.0f;
516  }
517 
518  c = current_layer->rbf;
519  for (i = 0; i < n_inputs; i++) {
520  real in = x[i];
521  for (j = 0; j < n_outputs; j++, c++) {
522  real o = (in - c->m) * c->w;
523  z[j] += o * o;
524  }
525  }
526 
527  for (j = 0; j < n_outputs; j++) {
528  z[j] = -0.5f * z[j];
529  y[j] = current_layer->f(z[j]);
530  }
531 }
532 
533 
534 //==========================================================
535 // ANN_Train() simple MSE training
536 //----------------------------------------------------------
537 
544 real ANN_Train(ANN * ann, real * x, real * t)
545 {
546  LISTITEM *p = LastListItem(ann->c);
547  Layer *l = (Layer *) p->obj;
548  real sum = 0.0f;
549  int j;
550 
551  ANN_Input(ann, x);
552 
553  for (j = 0; j < ann->n_outputs; j++) {
554  real f = l->f_d(ann->y[j]);
555  real e = t[j] - ann->y[j];
556  ann->error[j] = e;
557  ann->d[j] = e * f;
558  sum += e * e;
559  }
560 
561  l->backward(p, ann->d, ann->eligibility_traces, 0.0);
562 
563  return sum;
564 }
565 
566 
567 //==========================================================
568 // ANN_Delta_Train() Train with custom cost
569 //----------------------------------------------------------
571 
584 real ANN_Delta_Train(ANN * ann, real* delta, real TD)
585 {
586  LISTITEM *p = LastListItem(ann->c);
587  Layer *l = (Layer *) p->obj;
588  real sum = 0.0f;
589  int j;
590  //ANN_Input(ann, x);
591  for (j = 0; j < ann->n_outputs; j++) {
592  real f = l->f_d(ann->y[j]);
593  real e = delta[j];
594  ann->error[j] = e;
595  ann->d[j] = e * f;
596  sum += e * e;
597  }
598 
599  l->backward(p, ann->d, ann->eligibility_traces, TD);
600 
601  return sum;
602 }
603 
604 
605 
606 //==========================================================
607 // ANN_Backpropagate
608 //----------------------------------------------------------
// Backward pass for a fully-connected layer.
//
// p   : list item holding this layer (p->prev is the upstream layer).
// d   : delta vector for this layer's outputs (length n_outputs).
// use_eligibility : if true, update eligibility traces and scale the
//                   weight change by the TD error.
// TD  : temporal-difference error (only used with eligibility traces).
//
// First propagates deltas to the upstream layer (including through the
// bias slot), recursing via back_layer->backward; then applies the
// weight update for this layer, either accumulating into c->dw (batch
// mode) or updating c->w in place (online mode). c->v maintains a
// smoothed magnitude of recent updates, floored at 0.01.
// Always returns 0.
real ANN_Backpropagate(LISTITEM * p, real * d, bool use_eligibility, real TD)
{
	int i, j;
	real f;
	real a;
	Layer *l = (Layer *) p->obj;
	LISTITEM *back = p->prev;
	Layer *back_layer = NULL;
	a = l->a;
	if (back) {
		back_layer = (Layer *) back->obj;
		// Delta for each input i: sum_j w_ij * d_j, times the
		// upstream activation derivative evaluated at this
		// layer's input (the upstream layer's output).
		for (i = 0; i < l->n_inputs; i++) {
			real der = 0.0f;

			Connection *c = &l->c[i * l->n_outputs];
			for (j = 0; j < l->n_outputs; j++) {
				der += c->w * d[j];
				c++;
			}
			f = back_layer->f_d(l->x[i]);
			der *= f;
			l->d[i] = der;
		}

		/* bias: treated as a constant input of 1.0 */
		i = l->n_inputs;
		l->d[i] = 0.0f;
		Connection *c = &l->c[i * l->n_outputs];
		for (j = 0; j < l->n_outputs; j++) {
			l->d[i] += c->w * d[j];
			c++;
		}
		f = back_layer->f_d(1.0f);
		l->d[i] = l->d[i] * f;

		back_layer->backward(back, l->d, use_eligibility, TD);
	}
	// update weights
	for (i = 0; i < l->n_inputs; i++) {
		Connection *c = &l->c[i * l->n_outputs];
		real dx = a * l->x[i];	/* precomputed: learning rate * input */
		if (l->batch_mode) {
			for (j = 0; j < l->n_outputs; j++) {
				real delta;
				if (use_eligibility) {
					c->e = c->e * l->lambda + l->x[i] * d[j];
					delta = a * c->e * TD;
					// NOTE(review): `+=` here (vs `=` in every
					// other variance update) makes c->v grow by
					// its own smoothed value each step, and c->v
					// is then overwritten just below — looks
					// unintended; confirm before changing.
					c->v += (1.0f - l->zeta) * c->v + (l->zeta) * delta * delta;
				} else {
					delta = dx * d[j];
				}
				c->dw += delta;	/* accumulate; applied in ANN_LayerBatchAdapt */
				c->v = (1.0f - l->zeta) * c->v + (l->zeta) * fabs(delta);
				if (c->v < 0.01f)
					c->v = 0.01f;
				c++;
			}
		} else {
			for (j = 0; j < l->n_outputs; j++) {
				real delta;
				if (use_eligibility) {
					c->e = c->e * l->lambda + l->x[i] * d[j];
					delta = a * c->e * TD;
				} else {
					delta = dx * d[j];
				}
				c->w += delta;	/* online update */
				delta /= a;	/* variance tracks the raw (unscaled) step */
				c->v = (1.0f - l->zeta) * c->v + (l->zeta) * fabs(delta);
				if (c->v < 0.01f)
					c->v = 0.01f;
				c++;
			}
		}
	}
	// update bias weight (input fixed at 1, so no x[i] factor)
	{
		Connection *c = &l->c[l->n_inputs * l->n_outputs];
		if (l->batch_mode) {
			for (j = 0; j < l->n_outputs; j++) {
				real delta;
				if (use_eligibility) {
					c->e = c->e * l->lambda + d[j];
					delta = a * c->e * TD;
				} else {
					delta = a * d[j];
				}
				c->dw += delta;
				// NOTE(review): 1.0 (double) here vs 1.0f elsewhere —
				// harmless numerically, but inconsistent.
				c->v = (1.0 - l->zeta) * c->v + (l->zeta) * fabs(delta);
				if (c->v < 0.01f)
					c->v = 0.01f;
				c++;
			}
		} else {
			for (j = 0; j < l->n_outputs; j++) {
				real delta;
				if (use_eligibility) {
					c->e = c->e * l->lambda + d[j];
					delta = a * c->e * TD;
				} else {
					delta = a * d[j];
				}
				c->w += delta;
				c->v = (1.0f - l->zeta) * c->v + (l->zeta) * fabs(delta);
				if (c->v < 0.01f)
					c->v = 0.01f;
				c++;
			}
		}
	}
	return 0.0f;
}
721 
722 //==========================================================
723 // ANN_RBFBackpropagate
724 //----------------------------------------------------------
727 real ANN_RBFBackpropagate(LISTITEM * p, real * d, bool use_eligibility, real TD)
728 {
729  int i, j;
730  real f;
731  real a;
732  Layer *l = (Layer *) p->obj;
733  LISTITEM *back = p->prev;
734  Layer *back_layer = NULL;
735  a = l->a;
736 
737  if (back) {
738  back_layer = (Layer *) back->obj;
739  for (i = 0; i < l->n_inputs; i++) {
740  l->d[i] = 0.0f;
741  RBFConnection *c = &l->rbf[i * l->n_outputs];
742  for (j = 0; j < l->n_outputs; j++) {
743  real dx = l->x[i] - c->m;
744  real dm = d[j] * dx * c->w * c->w;
745  l->d[j] -= dm;
746  c++;
747  }
748  f = back_layer->f_d(l->x[i]);
749  l->d[i] = l->d[i] * f;
750  }
751  back_layer->backward(back, l->d, use_eligibility, TD);
752  }
753 
754  return 0.0f;
755  //update weights
756  for (i = 0; i < l->n_inputs; i++) {
757  RBFConnection *c = &l->rbf[i * l->n_outputs];
758  real dx = l->x[i] - c->m;
759  for (j = 0; j < l->n_outputs; j++) {
760  real dy = d[j];
761  real dx2 = a * dy * dx * c->w;
762  real dm = dx2 * c->w;
763  real dw = dx2 * dx;
764  c->m += dm;
765  c->w += dw;
766  c++;
767  }
768  }
769  return 0.0f;
770 }
771 
774 {
775  int i, j;
776 
777  if (l->batch_mode == false) {
778  Serror("Batch adapt yet not in batch mode!");
779  }
780  //update weights
781  for (i = 0; i < l->n_inputs; i++) {
782  Connection *c = &l->c[i * l->n_outputs];
783  for (j = 0; j < l->n_outputs; j++) {
784  c->w += c->dw;
785  c++;
786  }
787  }
788  // update bias weight
789  {
790  Connection *c = &l->c[l->n_inputs * l->n_outputs];
791  for (j = 0; j < l->n_outputs; j++) {
792  c->w += c->dw;
793  c++;
794  }
795  }
796 }
797 
798 //==========================================================
799 // ANN_Test()
800 //----------------------------------------------------------
802 real ANN_Test(ANN * ann, real * x, real * t)
803 {
804  //LISTITEM *p = LastListItem(ann->c);
805  //Layer *l = (Layer *) p->obj;
806  real sum = 0.0f;
807  int j;
808  ANN_Input(ann, x);
809 
810  for (j = 0; j < ann->n_outputs; j++) {
811  //real f = l->f_d(ann->y[j]);
812  real e = t[j] - ann->y[j];
813  ann->error[j] = e;
814  ann->d[j] =0.0;// e * f;
815  sum += e * e;
816  }
817  return sum;
818 }
819 
820 //==========================================================
821 // ANN_GetOutput()
822 //----------------------------------------------------------
825 {
826  return ann->y;
827 }
828 
829 //==========================================================
830 // ANN_GetError()
831 //----------------------------------------------------------
834 {
835  real sum = 0.0;
836  for (int i=0; i<ann->n_outputs; i++) {
837  real e = ann->error[i];
838  sum += e*e;
839  }
840  return (real) sqrt(sum);
841 }
842 
843 //==========================================================
844 // ANN_GetErrorVector()
845 //----------------------------------------------------------
848 {
849  return ann->error;
850 }
851 
852 //==========================================================
853 // ANN_SetLearningRate()
854 //----------------------------------------------------------
857 {
858  LISTITEM *c;
859 
860  ann->a = a;
861  c = FirstListItem(ann->c);
862  while (c) {
863  Layer *l = (Layer *) c->obj;
864  l->a = a;
865  c = NextListItem(ann->c);
866  }
867 }
868 //==========================================================
869 // ANN_SetLambda()
870 //----------------------------------------------------------
872 void ANN_SetLambda(ANN * ann, real lambda)
873 {
874  LISTITEM *c;
875 
876  ann->lambda = lambda;
877  c = FirstListItem(ann->c);
878  while (c) {
879  Layer *l = (Layer *) c->obj;
880  l->lambda = lambda;
881  c = NextListItem(ann->c);
882  }
883 }
884 
885 //==========================================================
886 // ANN_SetZeta()
887 //----------------------------------------------------------
890 void ANN_SetZeta(ANN * ann, real zeta)
891 {
892  LISTITEM *c;
893 
894  ann->zeta = zeta;
895  c = FirstListItem(ann->c);
896  while (c) {
897  Layer *l = (Layer *) c->obj;
898  l->zeta = zeta;
899  c = NextListItem(ann->c);
900  }
901 }
902 //==========================================================
903 // ANN_SetBatchMode
904 //----------------------------------------------------------
906 void ANN_SetBatchMode(ANN * ann, bool batch)
907 {
908  LISTITEM *c;
909 
910  ann->batch_mode = batch;
911  c = FirstListItem(ann->c);
912  while (c) {
913  Layer *l = (Layer *) c->obj;
914  l->batch_mode = batch;
915  c = NextListItem(ann->c);
916  }
917 }
918 
919 //==========================================================
920 // ANN_BatchAdapt
921 //----------------------------------------------------------
923 void ANN_BatchAdapt(ANN * ann)
924 {
925  LISTITEM *c;
926 
927  c = FirstListItem(ann->c);
928  while (c) {
929  Layer *l = (Layer *) c->obj;
931  c = NextListItem(ann->c);
932  }
933 }
934 
935 
936 
937 //==========================================================
938 // ANN_ShowWeights()
939 //----------------------------------------------------------
942 {
943  LISTITEM *c;
944  real sum = 0.0f;
945  c = FirstListItem(ann->c);
946  while (c) {
947  Layer *l = (Layer *) c->obj;
948  sum += ANN_LayerShowWeights(l);
949  c = NextListItem(ann->c);
950  }
951  return sum;
952 }
953 
954 //==========================================================
955 // ANN_LayerShowWeights()
956 //----------------------------------------------------------
959 {
960  int i, j;
961  Connection *c = l->c;
962  real sum = 0.0f;
963  for (i = 0; i < l->n_inputs + 1 /*bias */ ; i++) {
964  for (j = 0; j < l->n_outputs; j++) {
965  sum += (c->w) * (c->w); //printf ("%f ", c->w);
966  printf("%f ", c->w);
967  c++;
968  }
969  }
970  // printf ("%f ", sum);
971  return sum;
972 }
973 
974 
975 //==========================================================
976 // ANN_ShowInputs()
977 //----------------------------------------------------------
980 {
981  LISTITEM *c;
982  real sum = 0.0f;
983  c = FirstListItem(ann->c);
984  while (c) {
985  Layer *l = (Layer *) c->obj;
986  sum += ANN_LayerShowInputs(l);
987  c = NextListItem(ann->c);
988  }
989  return sum;
990 }
991 
992 //==========================================================
993 // ANN_LayerShowInputs()
994 //----------------------------------------------------------
997 {
998  int i;
999  real sum = 0.0f;
1000  for (i = 0; i < l->n_inputs; i++) {
1001  printf("#%f ", l->x[i]);
1002  }
1003  printf("-->");
1004  for (i = 0; i < l->n_outputs; i++) {
1005  printf("#(%f)%f ", l->f(l->z[i]), l->y[i]);
1006  }
1007 
1008  printf("\n");
1009  return sum;
1010 }
1011 
1012 
1013 //==========================================================
1014 // ANN_ShowOutputs()
1015 //----------------------------------------------------------
1018 {
1019  int i;
1020 
1021  for (i = 0; i < ann->n_outputs; i++) {
1022  printf("%f ", ann->y[i]);
1023  }
1024  printf("\n");
1025 }
1026 
1027 
1028 
1029 //==========================================================
1030 // ANN_SetOutputsToLinear()
1031 //----------------------------------------------------------
1034 {
1035  LISTITEM *c;
1036  c = LastListItem(ann->c);
1037  if (c) {
1038  Layer *l = (Layer *) c->obj;
1039  l->f = &linear;
1040  l->f_d = &linear_d;
1041  } else {
1042  Serror("Could not set outputs to linear\n");
1043  }
1044 }
1045 
1046 //==========================================================
1047 // ANN_SetOutputsToTanH()
1048 //----------------------------------------------------------
1051 {
1052  LISTITEM *c;
1053  c = LastListItem(ann->c);
1054  if (c) {
1055  Layer *l = (Layer *) c->obj;
1056  l->f = &htan;
1057  l->f_d = &htan_d;
1058  } else {
1059  Serror("Could not set outputs to TanH\n");
1060  }
1061 }
1062 
1063 
1064 //==========================================================
1065 // Exp()
1066 //----------------------------------------------------------
1069 {
1070  return (real) exp(x);
1071 }
1072 
1073 //==========================================================
1074 // Exp_d()
1075 //----------------------------------------------------------
1078 {
1079  return x;
1080 }
1081 
1082 //==========================================================
1083 // htan()
1084 //----------------------------------------------------------
1087 {
1088  return (real) tanh(x);
1089 }
1090 
1091 //==========================================================
1092 // htan_d()
1093 //----------------------------------------------------------
1096 {
1097  real f = (real) tanh(x);
1098  return (real) (1.0 - f * f);
1099 }
1100 
1101 //==========================================================
1102 // dtan()
1103 //----------------------------------------------------------
1106 {
1107  if (x>1.0) {
1108  return 1.0;
1109  } else if (x<1.0) {
1110  return -1.0;
1111  }
1112  return x;
1113 }
1114 
1115 //==========================================================
1116 // dtan_d()
1117 //----------------------------------------------------------
1120 {
1121  if (x>1.0) {
1122  return 0.0;
1123  } else if (x<-1.0) {
1124  return 0.0;
1125  }
1126  return 1.0;
1127 }
1128 
1129 //==========================================================
1130 // linear()
1131 //----------------------------------------------------------
1134 {
1135  return x;
1136 }
1137 
1138 //==========================================================
1139 // linear_d()
1140 //----------------------------------------------------------
1143 {
1144  return 1.0f;
1145 }
1146 
1148 static inline bool CheckMatchingToken (const char* tag, StringBuffer* buf, FILE* f)
1149 {
1150  int l = 1+strlen(tag);
1151  buf = SetStringBufferLength (buf, l);
1152  if (buf==NULL) {
1153  return false;
1154  }
1155  fread(buf->c, sizeof(char), l, f);
1156 
1157  if (strcmp(tag,buf->c)) {
1158  fprintf (stderr, "Expected tag <%s>, found <%s>.\n", tag, buf->c);
1159  return false;
1160  }
1161  return true;
1162 }
1163 
/// Write tag to f including its terminating NUL, so the reader can
/// match it byte-for-byte with CheckMatchingToken().
static inline void WriteToken (const char* tag, FILE* f)
{
	size_t n = strlen(tag) + 1;	/* include the NUL */
	fwrite (tag, sizeof(char), n, f);
}
1169 
1171 ANN* LoadANN(char* filename)
1172 {
1173  FILE* f = fopen (filename, "rb");
1174  if (f) {
1175  ANN* ann = LoadANN (f);
1176  fclose (f);
1177  return ann;
1178  }
1179  return NULL;
1180 }
1182 int SaveANN(ANN* ann, char* filename)
1183 {
1184  FILE* f = fopen (filename, "wb");
1185  if (f) {
1186  int r = SaveANN (ann, f);
1187  fclose (f);
1188  return r;
1189  }
1190  return -1;
1191 }
1192 
1194 ANN* LoadANN(FILE* f)
1195 {
1196  if (f==NULL) {
1197  return NULL;
1198  }
1199  StringBuffer* rtag = NewStringBuffer (256);
1200  CheckMatchingToken("VSOUND_ANN", rtag, f);
1201  int n_inputs;
1202  int n_outputs;
1203  fread(&n_inputs, sizeof(int), 1, f);
1204  fread(&n_outputs, sizeof(int), 1, f);
1205  ANN* ann = NewANN (n_inputs, n_outputs);
1206  CheckMatchingToken("Layer Data", rtag, f);
1207  int n_layers;
1208  fread(&n_layers, sizeof(int), 1, f);
1209  for (int i=0; i<n_layers-1; i++) {
1210  int layer_type;
1211  CheckMatchingToken("TYPE", rtag, f);
1212  fread(&layer_type, sizeof(int), 1, f);
1213  int nhu;
1214  CheckMatchingToken("UNITS", rtag, f);
1215  fread(&nhu, sizeof(int), 1, f);
1216  if (layer_type==0) {
1217  ANN_AddHiddenLayer(ann, nhu);
1218  } else {
1219  ANN_AddRBFHiddenLayer(ann, nhu);
1220  }
1221  }
1222  {
1223  int layer_type =0;
1224  ANN_Init(ann);
1225  CheckMatchingToken("Output Type", rtag, f);
1226  fread(&layer_type, sizeof(int), 1, f);
1227  if (layer_type==0) {
1228  ANN_SetOutputsToLinear(ann);
1229  } else {
1230  ANN_SetOutputsToTanH(ann);
1231  }
1232  }
1233 
1234  LISTITEM* list_item = FirstListItem(ann->c);
1235  while (list_item) {
1236  Layer* l = (Layer*) list_item->obj;
1237  CheckMatchingToken("Connections", rtag, f);
1238  int size = (l->n_inputs + 1 /*bias*/) * l->n_outputs;
1239  fread(l->c, size, sizeof(Connection), f);
1240  list_item = NextListItem (ann->c);
1241  }
1242  CheckMatchingToken("END", rtag, f);
1243 
1244  FreeStringBuffer (&rtag);
1245  return ann;
1246 }
1247 
1249 int SaveANN(ANN* ann, FILE* f)
1250 {
1251  if (f==NULL) {
1252  return -1;
1253  }
1254 
1255  StringBuffer* rtag = NewStringBuffer (256);
1256 
1257  WriteToken("VSOUND_ANN", f);
1258  fwrite(&ann->n_inputs, sizeof(int), 1, f);
1259  fwrite(&ann->n_outputs, sizeof(int), 1, f);
1260  WriteToken("Layer Data", f);
1261  int n_layers = 0;
1262  LISTITEM* list_item = FirstListItem(ann->c);
1263  while (list_item) {
1264  n_layers++;
1265  list_item = NextListItem (ann->c);
1266  }
1267  fwrite(&n_layers, sizeof(int), 1, f);
1268  list_item = FirstListItem(ann->c);
1269  for (int i=0; i<n_layers-1; i++) {
1270  Layer* l = (Layer*) list_item->obj;
1271 
1272  int layer_type = 0;
1273  WriteToken("TYPE", f);
1274  fwrite(&layer_type, sizeof(int), 1, f);
1275 
1276  int nhu = l->n_outputs;
1277  WriteToken("UNITS", f);
1278  fwrite(&nhu, sizeof(int), 1, f);
1279  list_item = NextListItem (ann->c);
1280  }
1281  WriteToken("Output Type", f);
1282  {
1283  int layer_type = 0;
1284  LISTITEM *c;
1285  c = LastListItem(ann->c);
1286  if (c) {
1287  Layer *l = (Layer *) c->obj;
1288  if (l->f==&linear) {
1289  layer_type = 0;
1290  } else {
1291  layer_type = 1;
1292  }
1293  }
1294  fwrite(&layer_type, sizeof(int), 1, f);
1295  }
1296  list_item = FirstListItem(ann->c);
1297  while(list_item) {
1298  Layer* l = (Layer*) list_item->obj;
1299  WriteToken("Connections", f);
1300  int size = (l->n_inputs + 1 /*bias*/) * l->n_outputs;
1301  fwrite(l->c, size, sizeof(Connection), f);
1302  list_item = NextListItem(ann->c);
1303  }
1304  WriteToken("END", f);
1305 
1306  FreeStringBuffer (&rtag);
1307  return 0;
1308 }
real dw
Weight-change.
Definition: ANN.h:51
real e
eligibility;
Definition: ANN.h:52
A list item.
Definition: List.h:20
int ANN_AddRBFHiddenLayer(ANN *ann, int n_nodes)
Add an RBF layer with n_nodes.
Definition: ANN.cpp:131
int ANN_Init(ANN *ann)
Initialise neural network.
Definition: ANN.cpp:346
real * x
inputs;
Definition: ANN.h:72
void ANN_SetOutputsToTanH(ANN *ann)
Set outputs to hyperbolic tangent.
Definition: ANN.cpp:1050
real ANN_Input(ANN *ann, real *x)
Give an input vector to the neural network.
Definition: ANN.cpp:406
Connection * c
connections
Definition: ANN.h:76
void * obj
data
Definition: List.h:21
real * z
activation
Definition: ANN.h:74
ANN * LoadANN(char *filename)
Load an ANN from a filename.
Definition: ANN.cpp:1171
real htan(real x)
Hyperbolic tangent hook.
Definition: ANN.cpp:1086
real htan_d(real x)
Hyperbolic tangent derivative hook.
Definition: ANN.cpp:1095
real linear(real x)
linear hook
Definition: ANN.cpp:1133
bool eligibility_traces
use eligibility
Definition: ANN.h:102
static bool CheckMatchingToken(const char *tag, StringBuffer *buf, FILE *f)
Check that tags match.
Definition: ANN.cpp:1148
real m
mean
Definition: ANN.h:64
void ANN_BatchAdapt(ANN *ann)
Adapt the parameters after a series of patterns has been seen.
Definition: ANN.cpp:923
void ANN_FreeLayer(void *l)
Free this layer - low level.
Definition: ANN.cpp:313
real ANN_LayerShowInputs(Layer *l)
Dump inputs to a particular layer on stdout.
Definition: ANN.cpp:996
real linear_d(real x)
linear derivative hook
Definition: ANN.cpp:1142
int c
connected?
Definition: ANN.h:49
real lambda
eligibility decay
Definition: ANN.h:79
real(* f_d)(real x)
derivative of activation function
Definition: ANN.h:85
int ClearList(LIST *list)
Clear the list.
Definition: List.cpp:291
void ANN_LayerBatchAdapt(Layer *l)
Perform batch adaptation.
Definition: ANN.cpp:773
#define FreeM(address)
Definition: learn_debug.h:31
void ANN_SetLearningRate(ANN *ann, real a)
Set the learning rate to a.
Definition: ANN.cpp:856
RBFConnection * rbf
rbf connections (if any)
Definition: ANN.h:77
real Exp(real x)
Exponential hook.
Definition: ANN.cpp:1068
real w
weight
Definition: ANN.h:50
StringBuffer * NewStringBuffer(int length)
Make a new stringbuffer.
real * d
delta vector
Definition: ANN.h:96
Some simple functions for string operations.
real(* backward)(LISTITEM *p, real *d, bool use_eligibility, real TD)
partial derivative calculation
Definition: ANN.h:83
#define Swarning
Definition: learn_debug.h:11
An RBF connection between two neural elements.
Definition: ANN.h:62
int n
number of items
Definition: List.h:41
#define DEC_ARG_INVALID
Definition: learn_debug.h:40
A linear connection between two neural elements.
Definition: ANN.h:48
void ANN_RBFCalculateLayerOutputs(Layer *current_layer, bool stochastic)
Calculate layer outputs.
Definition: ANN.cpp:503
LISTITEM * ListAppend(LIST *list, void *p)
Append an item to the list.
Definition: List.cpp:34
StringBuffer * SetStringBufferLength(StringBuffer *s, unsigned int l)
void ANN_SetZeta(ANN *ann, real zeta)
Set zeta, parameter variance smoothing.
Definition: ANN.cpp:890
int DeleteANN(ANN *ann)
Delete a neural network.
Definition: ANN.cpp:77
void ANN_Reset(ANN *ann)
Resets the eligbility traces and batch updates.
Definition: ANN.cpp:379
real ANN_StochasticInput(ANN *ann, real *x)
Stochastically generate an output, depending on parameter distributions.
Definition: ANN.cpp:429
real * x
unit inputs
Definition: ANN.h:93
real * ANN_GetOutput(ANN *ann)
Get the output for the current input.
Definition: ANN.cpp:824
real * ANN_GetErrorVector(ANN *ann)
Return the error vector for pattern.
Definition: ANN.cpp:847
static Point p[4]
Definition: Convex.cpp:54
int ANN_AddHiddenLayer(ANN *ann, int n_nodes)
Add a hidden layer with n_nodes.
Definition: ANN.cpp:111
LISTITEM * LastListItem(LIST *list)
Move to the last list item.
Definition: List.cpp:98
real zeta
variance update smoothness.
Definition: ANN.h:80
real * t
targets
Definition: ANN.h:95
real ANN_ShowWeights(ANN *ann)
Dump the weights on stdout.
Definition: ANN.cpp:941
void ANN_CalculateLayerOutputs(Layer *current_layer, bool stochastic)
Calculate layer outputs.
Definition: ANN.cpp:449
real ANN_Backpropagate(LISTITEM *p, real *d, bool use_eligibility, real TD)
d are the derivatives at the outputs.
Definition: ANN.cpp:610
real dtan(real x)
Discrete htan hook.
Definition: ANN.cpp:1105
char * c
This is the buffer.
Definition: string_utils.h:36
Layer * ANN_AddLayer(ANN *ann, int n_inputs, int n_outputs, real *x)
Low-level code to add a weighted sum layer.
Definition: ANN.cpp:152
real w
weight (= )
Definition: ANN.h:63
real ANN_ShowInputs(ANN *ann)
Dump inputs to all layers on stdout.
Definition: ANN.cpp:979
real(* f)(real x)
activation function
Definition: ANN.h:84
real dtan_d(real x)
Discrete htan derivative hook.
Definition: ANN.cpp:1119
real zeta
variance update smoothness
Definition: ANN.h:99
real ANN_Train(ANN *ann, real *x, real *t)
Perform mean square error training, where the aim is to minimise the cost function ...
Definition: ANN.cpp:544
real * y
unit activations
Definition: ANN.h:94
ANN * NewANN(int n_inputs, int n_outputs)
Create a new ANN.
Definition: ANN.cpp:25
bool batch_mode
do not update weights immediately
Definition: ANN.h:81
real a
learning rate
Definition: ANN.h:78
static Vector y[4]
Definition: Convex.cpp:56
real * error
errors
Definition: ANN.h:100
real a
learning rate
Definition: ANN.h:97
real ANN_GetError(ANN *ann)
Get the error for the current input/output pair.
Definition: ANN.cpp:833
void ANN_SetBatchMode(ANN *ann, bool batch)
Set batch updates.
Definition: ANN.cpp:906
LISTITEM * FirstListItem(LIST *list)
Move to the first list item.
Definition: List.cpp:81
bool batch_mode
use batch mode
Definition: ANN.h:101
real * d
derivatives
Definition: ANN.h:75
A collection of connections from one layer to another, plus management functions and data...
Definition: ANN.h:69
#define AllocM(type, x)
Definition: learn_debug.h:30
void ANN_SetLambda(ANN *ann, real lambda)
Set lambda, eligibility decay.
Definition: ANN.cpp:872
LISTITEM * NextListItem(LIST *list)
Advance one item.
Definition: List.cpp:67
void FreeStringBuffer(StringBuffer **s)
Given a pointer to a stringbuffer pointer, free it and clear it.
void ANN_ShowOutputs(ANN *ann)
Dump outputs to stdout.
Definition: ANN.cpp:1017
A very simple list structure.
Definition: List.h:37
real ANN_Test(ANN *ann, real *x, real *t)
Given an input and test pattern, return the MSE between the network's output and the test pattern...
Definition: ANN.cpp:802
int n_outputs
number of outputs
Definition: ANN.h:71
int n_inputs
number of inputs
Definition: ANN.h:90
int n_outputs
number of outputs
Definition: ANN.h:91
real v
variance estimate
Definition: ANN.h:53
static void WriteToken(const char *tag, FILE *f)
Write a token.
Definition: ANN.cpp:1165
#define Serror
Definition: learn_debug.h:10
real ANN_Delta_Train(ANN *ann, real *delta, real TD)
Minimise a custom cost function.
Definition: ANN.cpp:584
void(* forward)(struct Layer_ *current_layer, bool stochastic)
forward calculation
Definition: ANN.h:82
real Exp_d(real x)
Exponential derivative hook.
Definition: ANN.cpp:1077
real urandom()
int n_inputs
number of inputs
Definition: ANN.h:70
int SaveANN(ANN *ann, char *filename)
Save the ANN to a filename.
Definition: ANN.cpp:1182
A neural network implementation.
The StringBuffer structure stores buffers of strings.
Definition: string_utils.h:35
float real
Definition: real.h:13
void ANN_SetOutputsToLinear(ANN *ann)
Set outputs to linear.
Definition: ANN.cpp:1033
real lambda
eligibility trace decay
Definition: ANN.h:98
real ANN_RBFBackpropagate(LISTITEM *p, real *d, bool use_eligibility, real TD)
Backpropagation for an RBF layer.
Definition: ANN.cpp:727
ANN management structure.
Definition: ANN.h:89
real ANN_LayerShowWeights(Layer *l)
Dump the weights of a particular layer on stdout.
Definition: ANN.cpp:958
real * y
outputs
Definition: ANN.h:73
Layer * ANN_AddRBFLayer(ANN *ann, int n_inputs, int n_outputs, real *x)
Low-level code to add an RBF layer.
Definition: ANN.cpp:240
void message(const char *msg,...)
Prints a message.
LIST * c
connection layers
Definition: ANN.h:92