/* art2.c */
/***********************************************************************
The function prop_through propagates the information at the output of
the F1 layer to the F2 layer. After propagating the information, the
function activates the F2 layer in preparation for the top-down
processing. Finally, the function performs the top-down pattern
propagation back to F1, adjusting the p and r sublayers to reflect the
returned pattern.
************************************************************************/
void prop_through (art2 *n)
{
  int i;
  float *p, *u, *r, magU, magP;

  propagate (F1, F2);                /* bottom-up pass to F2 */
  activate (F2);                     /* competition on F2 */
  propagate (F2, F1);                /* top-down pass back to F1 */
  p = n->f1.p;
  u = n->f1.u;
  r = n->f1.r;
  for (i=0; i<F1->units; i++)        /* p = u + top-down signal */
    p[i] = u[i] + F1->outputs[i];
  magU = mag (u, F1->units);
  magP = mag (p, F1->units);
  for (i=0; i<F1->units; i++)        /* r = (u + C*p) / (e + |u| + C*|p|) */
    r[i] = (u[i] + n->C * p[i]) / (e + magU + n->C * magP);
}
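/***********************************************************************
The helper mag and the constant e are used above but not defined in
this listing. What follows is a minimal sketch, assuming that mag
computes the Euclidean magnitude (L2 norm) of a vector and that e is a
small positive constant guarding the ART2 denominators against division
by zero; the actual definitions may differ elsewhere in the source.
************************************************************************/
#include <math.h>
#define e 0.0001                     /* assumed small positive constant */

float mag (float *v, int units)
{
  int i;
  float sum = 0.0;

  for (i=0; i<units; i++)            /* accumulate squared components */
    sum += v[i] * v[i];
  return ((float) sqrt (sum));
}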
/***********************************************************************
The function compare_patterns determines the degree of match between
the STM on layer F1 and the original input pattern. The function
returns the ratio of the vigilance parameter, rho, to the magnitude of
the r sublayer vector; a value greater than one indicates a mismatch.
************************************************************************/
float compare_patterns (art2 *n)
{
  float magR;

  magR = mag (n->f1.r, F1->units);
  return (n->rho / (e + magR));
}
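/***********************************************************************
Note that the value returned above is the ART2 reset ratio: with
match = rho / (e + |r|), a result greater than 1 means that |r| has
fallen below the vigilance level (|r| < rho - e), signaling a mismatch
that should trigger a reset, while a result of 1 or less indicates
resonance. This is exactly the test applied in train_net below.
************************************************************************/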
/***********************************************************************
The function adjust_bottom_up_weights adapts the connections between
the F1 and F2 layers to encode a new memory pattern for F2 to recognize.
The input argument is the network structure. All values requiring
updates are modified as part of this routine, and no return value is
produced.
************************************************************************/
void adjust_bottom_up_weights (art2 *n)
{
  int i, winner;
  float *wts, *u;

  winner = F2->processed;            /* last winning F2 unit */
  wts = F2->connects[winner];
  u = n->f1.u;
  for (i=0; i<F1->units; i++)
    wts[i] = u[i] / (1.0 - n->D);
}
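/***********************************************************************
The update above is the fast-learning form of the ART2 bottom-up LTM
equation: at equilibrium, the weights into the winning F2 unit settle
to u[i] / (1 - d). Only the winner's incoming weight vector changes;
every other F2 unit keeps its previous pattern. The companion function
below applies the same equilibrium value to the top-down weights.
************************************************************************/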
/***********************************************************************
The function adjust_top_down_weights adapts the connections between
the F2 and F1 layers to encode a new LTM pattern for comparison to
a new input. The input argument is the network structure. All values
requiring updates are modified as part of this routine, and no return
value is produced.
************************************************************************/
void adjust_top_down_weights (art2 *n)
{
  int i, winner;
  float *wts, **connects, *u;

  winner = F2->processed;
  connects = F1->connects;
  u = n->f1.u;
  for (i=0; i<F1->units; i++)
  {
    wts = connects[i];
    wts[winner] = u[i] / (1.0 - n->D);
  }
}
/***********************************************************************
The function inhibit prevents a competitive unit that has won the
competition from participating in the competition again. This function
is necessary when training the competitive F2 layer of the ART2, since
a unit that wins every competition would keep the network from ever
learning the exemplars. The mechanism for inhibiting a unit is to
temporarily replace the unit's input connection weights with an array
of zeros, preventing the unit from being the best match on the next
competition. The value returned is the pointer to the true weight
array for the unit, so that the caller can restore it later.
***********************************************************************/
float *inhibit (int unit, layer *l, float *zeros)
{
  float *wts;

  wts = l->connects[unit];
  l->connects[unit] = zeros;
  return (wts);
}
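/***********************************************************************
The helper below is not part of the original listing; it is a minimal
sketch showing how the pointer returned by inhibit undoes the
inhibition for a single unit. Note that train_net instead snapshots
all of the F2 connection pointers before searching, which restores
every inhibited unit at once.
************************************************************************/
void uninhibit (int unit, layer *l, float *saved)
{
  l->connects[unit] = saved;         /* reinstall the true weight array */
}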
/***********************************************************************
The function save_art2 creates a save filename from the training file
name by replacing its extension with ".ar2", then writes the network
configuration data to that file. The network saved by this function
can be recreated by using the restore_art2 function defined later.
************************************************************************/
void save_art2 (art2 *n)
{
  char *outfile;
  FILE *fp;
  int i, dotpos;

  outfile = (char *)&n->filename;
  dotpos = position ('.', outfile);
  if (dotpos >= 0) outfile[dotpos] = '\0';
  strcat (outfile, ".ar2");
  if (!(fp = fopen (outfile, "w")))
  {
    printf ("\nERROR: Could not open the file \"%s\" for data storage.", outfile);
    exit (0);
  }
  fwrite (&n->layers, sizeof(int), 1, fp);
  for (i=0; i<n->layers; i++)
    fwrite (&LAYER[i]->units, sizeof(int), 1, fp);
  for (i=0; i<F1->units; i++)
    fwrite (F1->connects[i], sizeof(float), F2->units, fp);
  fwrite (&F1->modifier, sizeof(float), 1, fp);
  for (i=0; i<F2->units; i++)
    fwrite (F2->connects[i], sizeof(float), F1->units, fp);
  fwrite (&F2->modifier, sizeof(float), 1, fp);
  fclose (fp);
}
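/***********************************************************************
The helper position is used above but not defined in this listing.
What follows is a minimal sketch, assuming it returns the index of the
first occurrence of a character in a string, or -1 when the character
is absent (the -1 case is what the dotpos >= 0 test above guards
against); the actual definition may differ elsewhere in the source.
************************************************************************/
#include <string.h>

int position (char c, char *s)
{
  char *at;

  at = strchr (s, c);                /* locate first occurrence, if any */
  return (at ? (int)(at - s) : -1);
}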
/***********************************************************************
The function restore_art2 recreates an ART2 structure in memory from
data saved in a file. Note that only connection weight data is
restored. If the ART2 uses activation functions other than the
default, these must be reinstalled after recreating the network by
using the set_parameters command.
************************************************************************/
art2 *restore_art2 (char *filename)
{
  art2 *n;
  int *ldata, layers, i;
  FILE *fp;

  if (!(fp = fopen (filename, "r")))
  {
    printf ("\nERROR: Could not open configuration file \"%s\"", filename);
    exit (0);
  }
  fread (&layers, sizeof(int), 1, fp);
  ldata = (int *) calloc (layers+1, sizeof(int));
  for (i=1; i<=layers; i++)
    fread (&ldata[i], sizeof(int), 1, fp);
  ldata[0] = layers;
  n = build_art2 (ldata);
  free (ldata);
  connect (F1, F2, COMPLETE, RANDOM);
  connect (F2, F1, COMPLETE, RANDOM);
  for (i=0; i<F1->units; i++)
    fread (F1->connects[i], sizeof(float), F2->units, fp);
  fread (&F1->modifier, sizeof(float), 1, fp);
  for (i=0; i<F2->units; i++)
    fread (F2->connects[i], sizeof(float), F1->units, fp);
  fread (&F2->modifier, sizeof(float), 1, fp);
  fclose (fp);
  return (n);
}
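/***********************************************************************
A short usage sketch for the save/restore pair. The file name below is
hypothetical; save_art2 derives the actual name by replacing the
training file's extension with ".ar2":
************************************************************************/
art2 *reload_network (void)
{
  /* "art2test.ar2" is the name save_art2 would produce after training
     from "art2test.dat"; adjust to match the actual training file */
  return (restore_art2 ("art2test.ar2"));
}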
/***********************************************************************
The function train_net opens an exemplar file, loads the exemplars,
and makes two passes through the exemplar set, training the given
network on each input pattern. If the exemplars contain a
corresponding output, that value is ignored.
************************************************************************/
int train_net (art2 *n, char *filename)
{
  int i, j, pattern, winner;
  float degree_of_match, **savewts, *zeros;

  strcpy (n->filename, filename);
  n->exemplars = load_exemplars (filename, n->patterns);
  if (!valid_exemplars (n))
  {
    printf ("\nERROR: Exemplars do not match network size!");
    exit (0);
  }
  savewts = (float **) calloc (F2->units, sizeof (float *));
  zeros = (float *) calloc (F1->units, sizeof (float));
  for (i=0; i<F1->units; i++) zeros[i] = 0.0;
  for (i=0; i<2; i++)                       /* two passes over the data */
    for (pattern=0; pattern<n->exemplars; pattern++)
    {
      /* snapshot the true weight pointers before the search */
      for (j=0; j<F2->units; j++) savewts[j] = F2->connects[j];
      for (;;)
      {
        apply_input (n, get_invec (n, pattern));
        prop_through (n);
        degree_of_match = compare_patterns (n);
        if (degree_of_match <= 1.0) break;  /* resonance reached */
        winner = F2->processed;
        inhibit (winner, F2, zeros);        /* reset: remove the winner */
      }
      /* reinstall the true weights, then train the final winner */
      for (j=0; j<F2->units; j++) F2->connects[j] = savewts[j];
      adjust_bottom_up_weights (n);
      adjust_top_down_weights (n);
    }
  free (savewts);
  free (zeros);
  return (TRUE);
}
/***********************************************************************
The function set_parameters initializes a layer on the ART2. Arguments
to this function are a pointer to the layer, the propagation function
to be used during feed-forward propagation, the activation function for
the layer, and a value for the layer's activation modifier term.
************************************************************************/
void set_parameters (layer *l, pfn p, afn a, float m)
{
  set_propagation (l, p);
  set_activation (l, a, m);
}
/***********************************************************************
The function set_art2_parameters initializes the learning parameters
for the ART2 network. Arguments to this function are a pointer to the
network structure, and values for each parameter in the ART2 model.
************************************************************************/
void set_art2_parameters (art2 *n, float a, float b, float c, float d,
                          float theta, float rho)
{
  n->A = a;
  n->B = b;
  n->C = c;
  n->D = d;
  n->theta = theta;
  n->rho = rho;
}
/***********************************************************************
We are now able to create and train an ART2, using the functions that we
have defined up until now. We begin by creating a dynamic structure to
specify the number of layers in the network and the number of units on
each layer. We then create an ART2 structure in memory, connect the layers
appropriately, initialize each layer, and train the network using data
in the example file "art2test.dat". Once training has been completed, we
deallocate the network structure and return to the OS.
************************************************************************/
void main ()
{
  art2 *n;
  int *layers;

  layers = define_layers (2, 3, 4);
  n = build_art2 (layers);
  set_art2_parameters (n, 10, 10, 0.1, 0.9, 0.2, 0.9);
  F1->initval = 0.0;       /* top-down weights are initialized to zero */
  F2->initval = 1.0 / ((1.0 - n->D) * sqrt ((float) F1->units));
  connect (F1, F2, COMPLETE, VALUE);
  connect (F2, F1, COMPLETE, VALUE);
  set_parameters (F1, DOT_PRODUCT, LINEAR, 1.0);
  set_parameters (F2, DOT_PRODUCT, ON_CENTER, 0.0);
  if (train_net (n, "art2test.dat")) show_net (n);  /* save_art2 (n); */
  free (layers);
  destroy_art2 (n);
}