30 Serror(
"Could not allocate ANN\n");
50 Serror(
"Could not allocate errors\n");
56 Serror(
"Could not allocate derivatives\n");
61 if (!(ann->
c =
List())) {
62 Serror(
"Could not allocate list\n");
67 message(
"Creating ANN with %d inputs and %d outputs", n_inputs,
80 Swarning(
"Attempting to delete NULL ANN\n");
114 message(
"Adding Hidden layer with %d nodes", n_nodes);
134 message(
"Adding Hidden layer with %d nodes", n_nodes);
155 if ((x == NULL) && (ann->
c->
n)) {
157 (
"Layer connects to null but layer list is not empty\n");
162 Serror(
"Could not allocate layer structure\n");
166 assert(n_inputs > 0);
167 assert(n_outputs > 0);
183 Serror(
"Could not allocate layer outputs\n");
188 for (i=0; i<n_outputs; i++) {
193 Serror(
"Could not allocate layer activations\n");
197 for (i=0; i<n_outputs; i++) {
201 Serror(
"Could not allocate layer outputs\n");
205 for (i=0; i<n_inputs+1; i++) {
212 Serror(
"Could not allocate connections\n");
219 real bound = 2.0f / sqrt((
real) n_inputs);
220 for (i = 0; i < n_inputs + 1 ; i++) {
222 for (
int j = 0; j < n_outputs; j++) {
243 if ((x == NULL) && (ann->
c->
n)) {
245 (
"Layer connects to null and layer list not empty\n");
249 Serror(
"Could not allocate layer structure\n");
253 assert(n_inputs > 0);
254 assert(n_outputs > 0);
267 Serror(
"Could not allocate layer outputs\n");
273 Serror(
"Could not allocate layer activations\n");
279 Serror(
"Could not allocate layer outputs\n");
287 (n_inputs + 1 ) * n_outputs))) {
288 Serror(
"Could not allocate connections\n");
295 real bound = 2.0f / sqrt((
real) n_inputs);
296 for (
int i = 0; i < n_inputs + 1 ; i++) {
298 for (
int j = 0; j < n_outputs; j++) {
362 Serror(
"Could not create final layer\n");
385 for (
int i = 0; i < l->
n_inputs + 1 ; i++) {
416 current_layer->
forward(current_layer,
false);
439 current_layer->
forward(current_layer,
true);
452 int n_inputs = current_layer->
n_inputs;
453 int n_outputs = current_layer->
n_outputs;
454 real *x = current_layer->
x;
455 real *
y = current_layer->
y;
456 real *z = current_layer->
z;
459 for (j = 0; j < n_outputs; j++) {
462 c = current_layer->
c;
464 for (i = 0; i < n_inputs; i++) {
465 for (j = 0; j < n_outputs; j++) {
474 for (j = 0; j < n_outputs; j++) {
480 for (i = 0; i < n_inputs; i++) {
481 for (j = 0; j < n_outputs; j++) {
488 for (j = 0; j < n_outputs; j++) {
494 for (j = 0; j < n_outputs; j++) {
495 y[j] = current_layer->
f(z[j]);
506 int n_inputs = current_layer->
n_inputs;
507 int n_outputs = current_layer->
n_outputs;
508 real *x = current_layer->
x;
509 real *
y = current_layer->
y;
510 real *z = current_layer->
z;
514 for (j = 0; j < n_outputs; j++) {
518 c = current_layer->
rbf;
519 for (i = 0; i < n_inputs; i++) {
521 for (j = 0; j < n_outputs; j++, c++) {
522 real o = (in - c->
m) * c->
w;
527 for (j = 0; j < n_outputs; j++) {
529 y[j] = current_layer->
f(z[j]);
555 real e = t[j] - ann->
y[j];
617 Layer *back_layer = NULL;
622 back_layer = (
Layer *) back->obj;
631 f = back_layer->f_d(l->
x[i]);
641 l->
d[i] += c->
w * d[j];
644 f = back_layer->f_d(1.0f);
645 l->
d[i] = l->
d[i] * f;
647 back_layer->backward(back, l->
d, use_eligibility, TD);
652 real dx = a * l->
x[i];
656 if (use_eligibility) {
658 delta = a * c->
e * TD;
659 c->
v += (1.0f - l->
zeta)*c->
v+(l->
zeta)*delta*delta;
664 c->
v = (1.0f - l->
zeta)*c->
v + (l->
zeta)*fabs(delta);
665 if (c->
v < 0.01f) c->
v = 0.01f;
671 if (use_eligibility) {
672 c->
e = c->
e * l->
lambda + l->
x[i] * d[j];
673 delta = a * c->
e * TD;
679 c->
v = (1.0f - l->
zeta)*c->
v + (l->
zeta)*fabs(delta);
681 if (c->
v < 0.01f) c->
v = 0.01f;
692 if (use_eligibility) {
694 delta = a * c->
e * TD;
699 c->
v = (1.0 - l->
zeta)*c->
v + (l->
zeta)*fabs(delta);
700 if (c->
v < 0.01f) c->
v = 0.01f;
706 if (use_eligibility) {
708 delta = a * c->
e * TD;
713 c->
v = (1.0f - l->
zeta)*c->
v + (l->
zeta)*fabs(delta);
714 if (c->
v < 0.01f) c->
v = 0.01f;
734 Layer *back_layer = NULL;
738 back_layer = (
Layer *) back->obj;
744 real dm = d[j] * dx * c->
w * c->
w;
748 f = back_layer->f_d(l->
x[i]);
749 l->
d[i] = l->
d[i] * f;
751 back_layer->backward(back, l->
d, use_eligibility, TD);
761 real dx2 = a * dy * dx * c->
w;
762 real dm = dx2 * c->
w;
778 Serror(
"Batch adapt yet not in batch mode!");
812 real e = t[j] - ann->
y[j];
840 return (
real) sqrt(sum);
963 for (i = 0; i < l->
n_inputs + 1 ; i++) {
965 sum += (c->
w) * (c->
w);
1000 for (i = 0; i < l->
n_inputs; i++) {
1001 printf(
"#%f ", l->
x[i]);
1005 printf(
"#(%f)%f ", l->
f(l->
z[i]), l->
y[i]);
1022 printf(
"%f ", ann->
y[i]);
1042 Serror(
"Could not set outputs to linear\n");
1059 Serror(
"Could not set outputs to TanH\n");
1070 return (
real) exp(x);
1088 return (
real) tanh(x);
1098 return (
real) (1.0 - f * f);
1123 }
else if (x<-1.0) {
1150 int l = 1+strlen(tag);
1155 fread(buf->
c,
sizeof(
char), l, f);
1157 if (strcmp(tag,buf->
c)) {
1158 fprintf (stderr,
"Expected tag <%s>, found <%s>.\n", tag, buf->
c);
1167 fwrite (tag,
sizeof(
char), 1+strlen(tag), f);
1173 FILE* f = fopen (filename,
"rb");
1184 FILE* f = fopen (filename,
"wb");
1203 fread(&n_inputs,
sizeof(
int), 1, f);
1204 fread(&n_outputs,
sizeof(
int), 1, f);
1205 ANN* ann =
NewANN (n_inputs, n_outputs);
1208 fread(&n_layers,
sizeof(
int), 1, f);
1209 for (
int i=0; i<n_layers-1; i++) {
1212 fread(&layer_type,
sizeof(
int), 1, f);
1215 fread(&nhu,
sizeof(
int), 1, f);
1216 if (layer_type==0) {
1226 fread(&layer_type,
sizeof(
int), 1, f);
1227 if (layer_type==0) {
1258 fwrite(&ann->
n_inputs,
sizeof(
int), 1, f);
1259 fwrite(&ann->
n_outputs,
sizeof(
int), 1, f);
1267 fwrite(&n_layers,
sizeof(
int), 1, f);
1269 for (
int i=0; i<n_layers-1; i++) {
1274 fwrite(&layer_type,
sizeof(
int), 1, f);
1278 fwrite(&nhu,
sizeof(
int), 1, f);
1294 fwrite(&layer_type,
sizeof(
int), 1, f);
int ANN_AddRBFHiddenLayer(ANN *ann, int n_nodes)
Add an RBF layer with n_nodes.
int ANN_Init(ANN *ann)
Initialise neural network.
void ANN_SetOutputsToTanH(ANN *ann)
Set outputs to hyperbolic tangent.
real ANN_Input(ANN *ann, real *x)
Give an input vector to the neural network.
Connection * c
connections
ANN * LoadANN(char *filename)
Load an ANN from a filename.
real htan(real x)
Hyperbolic tangent hook.
real htan_d(real x)
Hyperbolic tangent derivative hook.
real linear(real x)
linear hook
bool eligibility_traces
use eligibility
static bool CheckMatchingToken(const char *tag, StringBuffer *buf, FILE *f)
Check that tags match.
void ANN_BatchAdapt(ANN *ann)
Adapt the parameters after a series of patterns has been seen.
void ANN_FreeLayer(void *l)
Free this layer - low level.
real ANN_LayerShowInputs(Layer *l)
Dump inputs to a particular layer on stdout.
real linear_d(real x)
linear derivative hook
real lambda
eligibility decay
real(* f_d)(real x)
derivative of activation function
int ClearList(LIST *list)
Clear the list.
void ANN_LayerBatchAdapt(Layer *l)
Perform batch adaptation.
void ANN_SetLearningRate(ANN *ann, real a)
Set the learning rate to a.
RBFConnection * rbf
rbf connections (if any)
real Exp(real x)
Exponential hook.
StringBuffer * NewStringBuffer(int length)
Make a new stringbuffer.
Some simple functions for string operations.
real(* backward)(LISTITEM *p, real *d, bool use_eligibility, real TD)
partial derivative calculation
An RBF connection between two neural elements.
A linear connection between two neural elements.
void ANN_RBFCalculateLayerOutputs(Layer *current_layer, bool stochastic)
Calculate layer outputs.
LISTITEM * ListAppend(LIST *list, void *p)
Append an item to the list.
StringBuffer * SetStringBufferLength(StringBuffer *s, unsigned int l)
void ANN_SetZeta(ANN *ann, real zeta)
Set zeta, parameter variance smoothing.
int DeleteANN(ANN *ann)
Delete a neural network.
void ANN_Reset(ANN *ann)
Resets the eligibility traces and batch updates.
real ANN_StochasticInput(ANN *ann, real *x)
Stochastically generate an output, depending on parameter distributions.
real * ANN_GetOutput(ANN *ann)
Get the output for the current input.
real * ANN_GetErrorVector(ANN *ann)
Return the error vector for pattern.
int ANN_AddHiddenLayer(ANN *ann, int n_nodes)
Add a hidden layer with n_nodes.
LISTITEM * LastListItem(LIST *list)
Move to the last list item.
real zeta
variance update smoothness.
real ANN_ShowWeights(ANN *ann)
Dump the weights on stdout.
void ANN_CalculateLayerOutputs(Layer *current_layer, bool stochastic)
Calculate layer outputs.
real ANN_Backpropagate(LISTITEM *p, real *d, bool use_eligibility, real TD)
d are the derivatives at the outputs.
real dtan(real x)
Discrete htan hook.
char * c
This is the buffer.
Layer * ANN_AddLayer(ANN *ann, int n_inputs, int n_outputs, real *x)
Low-level code to add a weighted sum layer.
real ANN_ShowInputs(ANN *ann)
Dump inputs to all layers on stdout.
real(* f)(real x)
activation function
real dtan_d(real x)
Discrete htan derivative hook.
real zeta
variance update smoothness
real ANN_Train(ANN *ann, real *x, real *t)
Perform mean square error training, where the aim is to minimise the cost function ...
ANN * NewANN(int n_inputs, int n_outputs)
Create a new ANN.
bool batch_mode
do not update weights immediately
real ANN_GetError(ANN *ann)
Get the error for the current input/output pair.
void ANN_SetBatchMode(ANN *ann, bool batch)
Set batch updates.
LISTITEM * FirstListItem(LIST *list)
Move to the first list item.
bool batch_mode
use batch mode
A collection of connections from one layer to another, plus management functions and data...
void ANN_SetLambda(ANN *ann, real lambda)
Set lambda, eligibility decay.
LISTITEM * NextListItem(LIST *list)
Advance one item.
void FreeStringBuffer(StringBuffer **s)
Given a pointer to a stringbuffer pointer, free it and clear it.
void ANN_ShowOutputs(ANN *ann)
Dump outputs to stdout.
A very simple list structure.
real ANN_Test(ANN *ann, real *x, real *t)
Given an input and test pattern, return the MSE between the network's output and the test pattern...
int n_outputs
number of outputs
int n_inputs
number of inputs
int n_outputs
number of outputs
static void WriteToken(const char *tag, FILE *f)
Write a token.
real ANN_Delta_Train(ANN *ann, real *delta, real TD)
Minimise a custom cost function.
void(* forward)(struct Layer_ *current_layer, bool stochastic)
forward calculation
real Exp_d(real x)
Exponential derivative hook.
int n_inputs
number of inputs
int SaveANN(ANN *ann, char *filename)
Save the ANN to a filename.
A neural network implementation.
The StringBuffer structure stores buffers of strings.
void ANN_SetOutputsToLinear(ANN *ann)
Set outputs to linear.
real lambda
eligibility trace decay
real ANN_RBFBackpropagate(LISTITEM *p, real *d, bool use_eligibility, real TD)
Backpropagation for an RBF layer.
ANN management structure.
real ANN_LayerShowWeights(Layer *l)
Dump the weights of a particular layer on stdout.
Layer * ANN_AddRBFLayer(ANN *ann, int n_inputs, int n_outputs, real *x)
Low-level code to add an RBF layer.
void message(const char *msg,...)
Prints a message.
LIST * c
connection layers