%!PS-Adobe-3.0
%%Title: (lia finland)
%%Creator: (Microsoft Word: LaserWriter 8 8.2)
%%CreationDate: (8:58 AM Friday, July 14, 1995)
%%For: (Tony)
%%Pages: 7
%%DocumentFonts: Times-Roman Times-Italic Times-Bold Symbol
%%DocumentNeededFonts: Times-Roman Times-Italic Times-Bold Symbol
%%DocumentSuppliedFonts:
%%DocumentData: Clean7Bit
%%PageOrder: Ascend
%%Orientation: Portrait
%%DocumentMedia: Default 612 792 0 () ()
%ADO_ImageableArea: 30 31 582 761
%%EndComments
% dscInfo: job metadata mirrored from the DSC header comments above, kept in
% userdict so document managers / spoolers can query it at run time.
% Fix: the header declares "%%Pages: 7" and every page's setjob string says
% "page: N of 7", but this dictionary said "/Pages 1" -- corrected to 7 so
% the run-time metadata agrees with the DSC comments.
userdict begin/dscInfo 5 dict dup begin
/Title(lia finland)def
/Creator(Microsoft Word: LaserWriter 8 8.2)def
/CreationDate(8:58 AM Friday, July 14, 1995)def
/For(Tony)def
/Pages 7 def
end def end
% md: the driver's main working dictionary (sized for 142 entries).  Array
% packing is forced on while the prolog loads and the previous mode is saved
% in sc_oldpacking (restored at the end of the prolog, before %%EndProlog).
/md 142 dict def md begin/currentpacking where {pop /sc_oldpacking currentpacking def true setpacking}if
%%BeginFile: adobe_psp_basic
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% Shorthand definers used throughout the prolog.
/bd{bind def}bind def           % bind and define
/xdf{exch def}bd                % define with name/value exchanged on the stack
/xs{exch store}bd               % store with name/value exchanged on the stack
/ld{load def}bd                 % define a name as an alias of an existing one
/Z{0 def}bd                     % declare a variable initialized to 0
% The next 14 name pairs are aliases, all defined at once by the
% "14{ld}repeat" below (e.g. T -> true, :L -> lineto, gS -> gsave, ...).
/T/true
/F/false
/:L/lineto
/lw/setlinewidth
/:M/moveto
/rl/rlineto
/rm/rmoveto
/:C/curveto
/:T/translate
/:K/closepath
/:mf/makefont
/gS/gsave
/gR/grestore
/np/newpath
14{ld}repeat
/$m matrix def                  % scratch matrix used by the ellipse/arc procs
/av 81 def                      % presumably the prolog revision number -- TODO confirm
/por true def                   % portrait-orientation flag (matches %%Orientation)
/normland false def             % normal-landscape flag
/psb-nosave{}bd                 % begin picture body, no VM save (no-op variant)
/pse-nosave{}bd                 % end picture body, no VM save (no-op variant)
/us Z                           % holds the save object taken by psb
/psb{/us save store}bd          % begin picture body: snapshot VM state
/pse{us restore}bd              % end picture body: restore VM state
% level2: true when the interpreter reports LanguageLevel >= 2.
/level2
/languagelevel where
{
pop languagelevel 2 ge
}{
false
}ifelse
def
% featurecleanup: executed after each feature block that was opened with
% "countdictstack[{".  "stopped cleartomark" discards anything the feature
% code left on the operand stack (even if it errored), then any dictionaries
% it left on the dict stack are popped, so a broken feature cannot corrupt
% the rest of the job.
/featurecleanup
{
stopped
cleartomark
countdictstack exch sub dup 0 gt
{
{end}repeat
}{
pop
}ifelse
}bd
/noload Z
% startnoload / endnoload: bracket a section of the prolog to be skipped.
% When the boolean on the stack is true, the bracketed definitions execute
% inside a save/restore and are therefore discarded.  This lets one prolog
% carry both Level 1 and Level 2 variants of the same procedures.
/startnoload
{
{/noload save store}if
}bd
/endnoload
{
{noload restore}if
}bd
% setjob (string -> -): record the job name; setcopies (int -> -): set the
% number of copies.  Two variants follow; the noload bracketing keeps only
% the one appropriate for the interpreter's language level.
level2 startnoload
% Level 1 variants (discarded on Level 2 interpreters): poke statusdict /
% userdict directly.
/setjob
{
statusdict/jobname 3 -1 roll put
}bd
/setcopies
{
userdict/#copies 3 -1 roll put
}bd
level2 endnoload level2 not startnoload
% Level 2 variants (discarded on Level 1 interpreters): use setuserparams /
% setpagedevice.
/setjob
{
1 dict begin/JobName xdf currentdict end setuserparams
}bd
/setcopies
{
1 dict begin/NumCopies xdf currentdict end setpagedevice
}bd
level2 not endnoload
/pm Z                           % per-page save object (taken by initializepage)
/mT Z                           % page transformation matrix (assigned in %%BeginSetup)
/sD Z                           % scratch dictionary (assigned in %%BeginSetup)
/realshowpage Z                 % placeholder; not used in the visible code
% initializepage: called in each %%BeginPageSetup; snapshots VM state and
% installs the page transformation matrix mT.
/initializepage
{
/pm save store mT concat
}bd
% endp: called at the end of each page; restores the per-page save (freeing
% everything the page allocated) and emits the page.
/endp
{
pm restore showpage
}def
/$c/DeviceRGB def               % color space name used by the driver
% rC (rectclip), rF (rectfill), rS (rectstroke): all take "x y width height".
% The native Level 2 operators are aliased when present; otherwise each is
% emulated by building the rectangle path explicitly.
/rectclip where
{
pop/rC/rectclip ld
}{
/rC
{
np 4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
:K
clip np
}bd
}ifelse
/rectfill where
{
pop/rF/rectfill ld
}{
/rF
{
gS
np
4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
fill
gR
}bd
}ifelse
/rectstroke where
{
pop/rS/rectstroke ld
}{
/rS
{
gS
np
4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
:K
stroke
gR
}bd
}ifelse
%%EndFile
%%BeginFile: adobe_psp_colorspace_level1
%%Copyright: Copyright 1991-1993 Adobe Systems Incorporated. All Rights Reserved.
% Level 1 color shorthands: G = setgray, :F = setrgbcolor.
/G/setgray ld
/:F/setrgbcolor ld
%%EndFile
%%BeginFile: adobe_psp_uniform_graphics
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% @a, @b: fill small polygons built from absolute and axis-aligned relative
% segments; the page code uses these to draw thin rules/arrow shafts.
/@a
{
np :M 0 rl :L 0 exch rl 0 rl :L fill
}bd
/@b
{
np :M 0 rl 0 exch rl :L 0 rl 0 exch rl fill
}bd
% arct: emulate the Level 2 arct operator via arcto (discarding the four
% tangent-point results) when the interpreter lacks it.
/arct where
{
pop
}{
/arct
{
arcto pop pop pop pop
}bd
}ifelse
% Scratch variables for the rounded-rectangle procedures below.
/x1 Z
/x2 Z
/y1 Z
/y2 Z
/rad Z
% @q: filled rounded rectangle.  Stack: x1 y1 x2 y2 rad
% Starts at the midpoint of the bottom edge and rounds each corner with arct.
/@q
{
/rad xs
/y2 xs
/x2 xs
/y1 xs
/x1 xs
np
x2 x1 add 2 div y1 :M
x2 y1 x2 y2 rad arct
x2 y2 x1 y2 rad arct
x1 y2 x1 y1 rad arct
x1 y1 x2 y1 rad arct
fill
}bd
% @s: stroked rounded rectangle.  Stack: x1 y1 x2 y2 rad
/@s
{
/rad xs
/y2 xs
/x2 xs
/y1 xs
/x1 xs
np
x2 x1 add 2 div y1 :M
x2 y1 x2 y2 rad arct
x2 y2 x1 y2 rad arct
x1 y2 x1 y1 rad arct
x1 y1 x2 y1 rad arct
:K
stroke
}bd
% @i: filled circle.  Stack: x y r
/@i
{
np 0 360 arc fill
}bd
% @j: filled ellipse.  Stack: sx sy tx ty -- translate to (tx,ty), scale by
% (sx,sy), then fill a unit-diameter circle.
/@j
{
gS
np
:T
scale
0 0 .5 0 360 arc
fill
gR
}bd
% @e: stroked circle.  Stack: x y r
/@e
{
np
0 360 arc
:K
stroke
}bd
% @f: stroked ellipse.  The CTM is saved in $m and restored before stroking
% so the pen width is not distorted by the scale.
/@f
{
np
$m currentmatrix
pop
:T
scale
0 0 .5 0 360 arc
:K
$m setmatrix
stroke
}bd
% @k: filled pie wedge.  Stack: r ang1 ang2 tx ty -- translate to (tx,ty),
% then fill the wedge from the center out to radius r.
/@k
{
gS
np
:T
0 0 :M
0 0 5 2 roll
arc fill
gR
}bd
% @l: filled elliptical wedge.  Stack: ang1 ang2 sx sy tx ty (unit circle
% scaled by sx,sy).
/@l
{
gS
np
:T
0 0 :M
scale
0 0 .5 5 -2 roll arc
fill
gR
}bd
% @m: stroked circular arc.  Stack: x y r ang1 ang2
/@m
{
np
arc
stroke
}bd
% @n: stroked elliptical arc; restores the saved CTM before stroking so the
% pen width is not distorted by the scale.
/@n
{
np
$m currentmatrix
pop
:T
scale
0 0 .5 5 -2 roll arc
$m setmatrix
stroke
}bd
%%EndFile
%%BeginFile: adobe_psp_basic_text
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% Text-showing shorthands.  The page code calls these with justification
% deltas that were precomputed by the driver when the text was laid out.
/S/show ld
% A: ashow with zero y-delta -- adds a constant x spacing after every glyph.
/A{
0.0 exch ashow
}bd
% R: widthshow keyed on the space character (code 32) -- extra word spacing.
/R{
0.0 exch 32 exch widthshow
}bd
% W: widthshow with zero y-delta and an explicit break character.
/W{
0.0 3 1 roll widthshow
}bd
% J: awidthshow with word spacing on char 32 plus per-glyph spacing
% (fully justified text).
/J{
0.0 32 4 2 roll 0.0 exch awidthshow
}bd
% V: awidthshow variant with an explicit break character.
/V{
0.0 4 1 roll 0.0 exch awidthshow
}bd
% fc: print a one-time console warning when free VM drops below 50000 bytes
% (vmstatus max minus used).
/fcflg true def
/fc{
fcflg{
vmstatus exch sub 50000 lt{
(%%[ Warning: Running out of memory ]%%\r)print flush/fcflg false store
}if pop
}if
}bd
% $f: vertical flip matrix.  Page space has y increasing downward (see mT in
% the setup section), so fonts are mirrored back upright by :ff.
/$f[1 0 0 -1 0 0]def
/:ff{$f :mf}bd
% MacEncoding: Adobe StandardEncoding with the Macintosh character set laid
% over it: quotesingle/grave in the ASCII range, then the 128 Mac-specific
% glyph names below installed into positions 128-255 by the getinterval /
% astore at the end.
/MacEncoding StandardEncoding 256 array copy def
MacEncoding 39/quotesingle put
MacEncoding 96/grave put
/Adieresis/Aring/Ccedilla/Eacute/Ntilde/Odieresis/Udieresis/aacute
/agrave/acircumflex/adieresis/atilde/aring/ccedilla/eacute/egrave
/ecircumflex/edieresis/iacute/igrave/icircumflex/idieresis/ntilde/oacute
/ograve/ocircumflex/odieresis/otilde/uacute/ugrave/ucircumflex/udieresis
/dagger/degree/cent/sterling/section/bullet/paragraph/germandbls
/registered/copyright/trademark/acute/dieresis/notequal/AE/Oslash
/infinity/plusminus/lessequal/greaterequal/yen/mu/partialdiff/summation
/product/pi/integral/ordfeminine/ordmasculine/Omega/ae/oslash
/questiondown/exclamdown/logicalnot/radical/florin/approxequal/Delta/guillemotleft
/guillemotright/ellipsis/space/Agrave/Atilde/Otilde/OE/oe
/endash/emdash/quotedblleft/quotedblright/quoteleft/quoteright/divide/lozenge
/ydieresis/Ydieresis/fraction/currency/guilsinglleft/guilsinglright/fi/fl
/daggerdbl/periodcentered/quotesinglbase/quotedblbase/perthousand
/Acircumflex/Ecircumflex/Aacute/Edieresis/Egrave/Iacute/Icircumflex/Idieresis/Igrave
/Oacute/Ocircumflex/apple/Ograve/Uacute/Ucircumflex/Ugrave/dotlessi/circumflex/tilde
/macron/breve/dotaccent/ring/cedilla/hungarumlaut/ogonek/caron
MacEncoding 128 128 getinterval astore pop
% copyfontdict (/FontName -> -): find the font and leave a same-sized copy of
% its dictionary open on the dict stack (FID omitted so the copy can be
% re-defined).  Level 1 copies entry by entry; the other variant uses "copy".
level2 startnoload
/copyfontdict
{
findfont dup length dict
begin
{
1 index/FID ne{def}{pop pop}ifelse
}forall
}bd
level2 endnoload level2 not startnoload
/copyfontdict
{
findfont dup length dict
copy
begin
}bd
level2 not endnoload
% fontname: the name under which re-encoded fonts are registered.
md/fontname known not{
/fontname/customfont def
}if
/Encoding Z
% :mre (/newname /FontName -> -): copy the font, install MacEncoding, define
% it, flip it with :ff, and bind the result to /newname.
/:mre
{
copyfontdict
/Encoding MacEncoding def
fontname currentdict
end
definefont :ff def
}bd
% :bsr ... :esr (/newname /FontName :bsr  code /glyph pd ...  :esr): build a
% custom re-encoding; each "pd" stores one code/glyph pair into the copied
% Encoding array between the two brackets.
/:bsr
{
copyfontdict
/Encoding Encoding 256 array copy def
Encoding dup
}bd
/pd{put dup}bd
/:esr
{
pop pop
fontname currentdict
end
definefont :ff def
}bd
% scf (/name basefont size -> -): define a scaled instance of a base font.
/scf
{
scalefont def
}bd
% scf-non: non-standard scale path -- scale via $m and makefont, then setfont.
/scf-non
{
$m scale :mf setfont
}bd
/ps Z
/fz{/ps xs}bd                   % fz: record the current point size in ps
/sf/setfont ld
/cF/currentfont ld
% mbf: define a multiple-master blended font (named /ABlend) when the
% interpreter supports makeblendedfont; otherwise fall back to the base font.
/mbf
{
/makeblendedfont where
{
pop
makeblendedfont
/ABlend exch definefont
}{
pop
}ifelse
def
}def
%%EndFile
% Restore the packing mode saved at the top of the prolog and close md.
/currentpacking where {pop sc_oldpacking setpacking}if end
%%EndProlog
%%BeginSetup
md begin
% Printer feature requests.  Each is bracketed by "countdictstack[{ ...
% }featurecleanup" so a failure inside one feature cannot disturb the stacks
% for the rest of the job (see featurecleanup in the prolog).
countdictstack[{
%%BeginFeature: *ManualFeed False
level2 {1 dict dup /ManualFeed false put setpagedevice}{statusdict begin /manualfeed false store end} ifelse
%%EndFeature
}featurecleanup
countdictstack[{
%%BeginFeature: *InputSlot Upper
%%EndFeature
}featurecleanup
countdictstack[{
%%BeginFeature: *PageRegion LetterSmall
level2 {
2 dict dup /PageSize [612 792] put dup /ImagingBBox [30 31 582 761] put setpagedevice
}{
/lettersmall where {pop lettersmall} {letterR} ifelse
} ifelse
%%EndFeature
}featurecleanup
(Tony)setjob
% mT: page transformation -- flip y (PostScript y-up to page y-down) and put
% the origin at the top-left corner of the imageable area (30,761); matches
% %ADO_ImageableArea in the header.
/mT[1 0 0 -1 30 761]def
/sD 16 dict def
% Set the job wait timeout to 300 seconds (Level 1 and Level 2 forms).
300 level2{1 dict dup/WaitTimeout 4 -1 roll put setuserparams}{statusdict/waittimeout 3 -1 roll put}ifelse
%%IncludeFont: Times-Roman
%%IncludeFont: Times-Italic
%%IncludeFont: Times-Bold
%%IncludeFont: Symbol
% Font instances used by the page code: fN_S is family N at point size S.
% Text fonts are re-encoded with MacEncoding via :mre; Symbol gets a custom
% encoding (:bsr/:esr) that adds /apple at code 240.
/f0_1/Times-Roman
:mre
/f0_12 f0_1 12 scf
/f0_10 f0_1 10 scf
/f1_1/Times-Italic
:mre
/f1_12 f1_1 12 scf
/f1_10 f1_1 10 scf
/f2_1/Times-Bold
:mre
/f2_12 f2_1 12 scf
/f3_1/Symbol
:bsr
240/apple pd
:esr
/f3_12 f3_1 12 scf
/Courier findfont[10 0 0 -10 0 0]:mf setfont
%%EndSetup
%%Page: 1 1
%%BeginPageSetup
initializepage
(Tony; page: 1 of 7)setjob
%%EndPageSetup
gS 0 0 552 730 rC
42 14 :M
f0_10 sf
.084 .008(In )J
f1_10 sf
.235 .023(Artificial Neural Networks)J
f0_10 sf
.169 .017(, Kohonen, et. al. \(eds\), Elsevier Science Publishers, pp. 729-734, 1991.)J
91 83 :M
f0_12 sf
-.151(AN EFFICIENT STATIC TOPOLOGY FOR MODELING ASOCS)A
91 115 :M
-.053(George L. Rudolph and Tony R. Martinez)A
91 139 :M
-.17(Department of Computer Science)A
91 151 :M
-.1(Brigham Young University)A
91 163 :M
.58 .058(Provo, Utah, USA, 84602)J
91 195 :M
.759 .076(ASOCS \(Adaptive Self-Organizing Concurrent Systems\) are a class of connectionist)J
91 207 :M
-.136(computational models which feature self-organized learn-ing and parallel execution. One)A
91 219 :M
.225 .022(area of ASOCS research is the development of an efficient implementation of ASOCS)J
91 231 :M
-.037(using current technology. A result of this research is the Location-Independent ASOCS)A
91 243 :M
-.014(model \(LIA\). LIA uses a parallel, asynchronous network of in-dependent nodes, which)A
91 255 :M
.267 .027(leads to an efficient physical realization using current technology. This paper reviews)J
91 267 :M
.155 .016(the behavior of the LIA model, and shows how a static binary tree topology efficiently)J
91 279 :M
.97 .097(supports that behavior. The binary tree topology allows for )J
f1_12 sf
.276(O\(log\(n\)\))A
f0_12 sf
1.058 .106( learning and)J
91 291 :M
-.057(execution times, where )A
f1_12 sf
-.069(n)A
f0_12 sf
-.057( is the number of nodes in the network.)A
42 329 :M
-.042(1. INTRODUCTION)A
60 347 :M
3.004 .3(ASOCS \(Adaptive Self-Organizing Concurrent Systems\) is a class of connectionist)J
42 359 :M
1.763 .176(computational models [2],[4]. ASOCS models support efficient computation through self-)J
42 371 :M
1.017 .102(organized learning and parallel execution. ASOCS has the same overall goals as other ANN)J
42 383 :M
.655 .065(\(artificial neural network\) models [8]. However, the underlying learning and execution mecha-)J
42 395 :M
.078 .008(nisms for ASOCS differ significantly from those of other ANN models. Current ASOCS models)J
42 407 :M
1.751 .175(are based on networks of digital nodes rather than analog nodes. The networks store and)J
42 419 :M
-.035(manipulate digital representations of data. ASOCS learning is incremental rather than training-set)A
42 431 :M
-.001(style. Examples are incrementally presented and incorporated into an adaptive logic network in a)A
42 443 :M
.206 .021(parallel and self-organizing manner, with priority given to the most recent example. The system)J
42 455 :M
1.216 .122(resolves inconsistencies and generalizes as each example is presented. ASOCS incorporates)J
42 467 :M
.794 .079(topological adaptation into the learning algorithm, rather than operating with an initially fixed)J
42 479 :M
-.034(topology. In execution mode, an ASOCS network operates as an asynchronous, parallel hardware)A
42 491 :M
.752 .075(circuit. The network maps input from the environment to output data based on the previously)J
42 503 :M
-.101(learned examples.)A
60 515 :M
.659 .066(Two significant advantages of ASOCS are: a\) an ASOCS net is guaranteed to learning any)J
42 527 :M
.044 .004(arbitrary set of legal examples, and b\) learning time is fast and bounded, being linear in the depth)J
42 539 :M
.309 .031(of the network and logarithmic in the number of nodes. Target applications for ASOCS include)J
42 551 :M
1.41 .141(adaptive logic devices, robotics, embedded systems, and pattern recognition \(standard ANN)J
42 563 :M
.031 .003(applications\). A detailed discussion of ASOCS is beyond the scope of this paper, but is available)J
42 575 :M
-.009(in the literature [3],[4],[5].)A
60 587 :M
-.141(One area of research interest is the development of an efficient implementation of ASOCS using)A
42 599 :M
-.031(current technology. A proof-of-concept ASOCS chip was developed at UCLA [1], however other)A
42 611 :M
-.102(more efficient implementations are of interest. A result of this research is the ASOCS model called)A
42 623 :M
2.353 .235(Location-Independent ASOCS \(LIA\) [6]. LIA uses a parallel, asynchronous network of)J
42 635 :M
-.1(independent nodes, which leads to an efficient physical realization using current technology.)A
60 647 :M
1.944 .194(This paper reviews the behavior of LIA, and shows how a static binary tree topology)J
42 659 :M
1.334 .133(efficiently supports that behavior. Section 2 discusses how to describe a model in terms of)J
42 671 :M
.502 .05(structure and behavior. Section 3 section briefly defines basic LIA mechanisms, execution and)J
42 683 :M
1.081 .108(learning modes. Section 4 discusses location independence. Section 5 describes the physical)J
endp
%%Page: 2 2
%%BeginPageSetup
initializepage
(Tony; page: 2 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(2)S
72 81 :M
-.073(topology with examples of operation. Section 6 concludes the paper.)A
72 113 :M
-.122(2. Describing the LIA Model: Structure and Behavior)A
90 131 :M
.746 .075(The critical function of a particular structure is to support efficiently the model's behavior.)J
72 143 :M
f1_12 sf
.416(Structure)A
f0_12 sf
1.252 .125( as defined here means the )J
f1_12 sf
.489(node)A
f0_12 sf
1.504 .15( \(internal\) structure and the network )J
f1_12 sf
.44(topology)A
f0_12 sf
1.031 .103(, or)J
72 155 :M
-.106(interconnect between the computing nodes.)A
90 167 :M
-.01(The topology must support the behavior of the nodes and the network as a whole. Topologies)A
72 179 :M
.462 .046(are often categorized according to form and behavior. There are many different forms, but two)J
72 191 :M
.517 .052(basic behaviors--)J
f1_12 sf
.074(static)A
f0_12 sf
.158 .016( and )J
f1_12 sf
.099(dynamic)A
f0_12 sf
.357 .036(. When discussing models and potential implementations, a)J
72 203 :M
-.012(distinction between logical and physical topologies is a useful one. The )A
f1_12 sf
-.012(logical topology)A
f0_12 sf
-.012( refers to)A
72 215 :M
.628 .063(the topology used in the model. The )J
f1_12 sf
1.293 .129(physical topology)J
f0_12 sf
.866 .087( refers to the implementation \(physical)J
72 227 :M
-.116(realization\) of the model topology.)A
90 239 :M
.691 .069(Described in these terms, previous ASOCS models map a dynamic logical topology onto a)J
72 251 :M
1.315 .131(dynamic physical topology. LIA uses learning and execution algorithms similar to those of)J
72 263 :M
.321 .032(previous models. The benefit of LIA is the efficiency of a static physical topology. LIA maps a)J
72 275 :M
.346 .035(dynamic logical topology onto a static physical topology in such a way that the dynamic logical)J
72 287 :M
-.088(behavior is efficiently preserved.)A
72 319 :M
-.106(3. BASIC MECHANISMS AND BEHAVIOR)A
90 337 :M
1.419 .142(The underlying mechanisms of LIA are the instance, the instance set, and their network)J
72 349 :M
-.025(representation. The basic unit of knowledge in LIA is the instance. An )A
f1_12 sf
-.027(instance)A
f0_12 sf
-.028( is composed of a)A
72 361 :M
-.009(conjunction of boolean input variables and an output variable. The output variable specifies what)A
72 373 :M
.543 .054(the network output should be for that variable whenever the input variables are matched by the)J
72 385 :M
-.092(environment. Two examples of instances are the following:)A
94 400 :M
1.125 .112(AB' => Z')J
94 412 :M
-.089(AB => Z)A
90 427 :M
1.104 .11(The first instance specifies that whenever A is high and B is low, Z should be low. The)J
72 439 :M
.451 .045(second instance specifies that whenever A and B are both high, Z should be high. LIA handles)J
72 451 :M
-.078(instances with multiple output variables. In order to simplify discussion of the model in this paper,)A
72 463 :M
-.092(examples use only one output variable Z.)A
90 475 :M
.507 .051(An instance )J
f1_12 sf
.541 .054(is matched by)J
f0_12 sf
.216 .022( \(or )J
f1_12 sf
.152(matches)A
f0_12 sf
.482 .048(\) an environment \(or state of the environment\) if and)J
72 487 :M
.141 .014(only if, for all variables in the instance's input list, the value of the variable in the environment is)J
72 499 :M
1.841 .184(the same as the value specified by the instance. The instance A => Z is matched by the)J
72 511 :M
.34 .034(environment ABCD, but is not matched by A'BCD, for example. An instance X )J
f1_12 sf
.098(covers)A
f0_12 sf
.431 .043( another)J
72 523 :M
-.083(instance Y if and only if all states of the environment that match Y also match X. The instance AB)A
72 535 :M
.507 .051(=> Z covers the instance ABC => Z, but does not cover the instance A'B => Z. An instance X)J
72 547 :M
f1_12 sf
-.036(contradicts)A
f0_12 sf
-.036( another instance Y \(and vice versa\) if and only if a\) X and Y specify opposite network)A
72 559 :M
.541 .054(outputs and b\) there exists at least one possible state of the environment by which X and Y are)J
72 571 :M
.274 .027(simultaneously matched. The instance AB'C => Z contradicts the instance AB' => Z': any state)J
72 583 :M
-.086(of the environment in which A is high and B is low matches simultaneously both instances.)A
90 595 :M
.804 .08(Previous ASOCS models use a distributed hierarchy of 2-input nodes in their networks, in)J
72 607 :M
1.072 .107(which each node represents only part of an instance. In an LIA network, each node stores a)J
72 619 :M
1.126 .113(complete instance. Figure 1 shows a network that has stored the two instances listed above.)J
72 631 :M
1.521 .152(Nodes are numbered for reference only. The topology \(how the nodes interconnect\) is not)J
72 643 :M
-.096(specified here in order to emphasize that the behavior of the model does not constrain the topology.)A
72 655 :M
-.036(Topology is discussed later.)A
90 670 :M
.672 .067(The collection of all instances currently stored in a network is called the )J
f1_12 sf
1.016 .102(instance set)J
f0_12 sf
.439 .044(. The)J
72 682 :M
-.101(instance set represents the function currently executed \(computed\) by the network.)A
endp
%%Page: 3 3
%%BeginPageSetup
initializepage
(Tony; page: 3 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(3)S
2 lw
137 75 338 65 rC
192 78 216 59 rS
221 90 :M
(\(1\))S
1 lw
209.5 98.5 65 20 rS
327 91 :M
(\(2\))S
352 113 :M
f3_12 sf
S
334 113 :M
f0_12 sf
-.142(AB Z)A
238 112 :M
f3_12 sf
S
219 112 :M
f0_12 sf
.455 .046(AB' Z')J
320.5 99.5 65 20 rS
10 156 204 193 109 @k
155 110 -1 1 185 109 1 155 109 @a
10 156 204 442 109 @k
409 110 -1 1 434 109 1 409 109 @a
141 99 :M
f2_12 sf
.332(Input)A
430 98 :M
.133(Output)A
gR
gS 30 31 552 730 rC
185 152 :M
f0_12 sf
-.062(Figure 1. An LIA Network \(topology unspecified\))A
72 180 :M
-.036(3.1. Execution Mode)A
90 198 :M
-.002(In execution mode, the network receives input from the environment and computes an output.)A
72 210 :M
.931 .093(The operation of the network is parallel, asynchronous, and the nodes are independent of one)J
72 222 :M
-.082(another. Each node in the network receives a "broadcast" of the environmental input and checks to)A
72 234 :M
1.91 .191(see if its input variables are matched. A node whose input variables are matched by the)J
72 246 :M
.684 .068(environment activates its output, and that value becomes the value of network. A node whose)J
72 258 :M
-.078(variables are not matched by the environment does nothing. The following two examples illustrate)A
72 270 :M
-.085(how LIA operates in execution mode. Assume the network in figure 1 is in execution mode.)A
90 282 :M
.236 .024(The input is )J
f2_12 sf
.124(ABCD.)A
f0_12 sf
.263 .026( Node 1 compares the environment to its input variables. Node 1 is not)J
72 294 :M
.74 .074(matched because B is high in the current environment. Node 1, therefore, does nothing more.)J
72 306 :M
.797 .08(Node 2, however, is matched because A and B are high \(C and D are )J
f1_12 sf
1.244 .124(don't care)J
f0_12 sf
.99 .099( variables for)J
72 318 :M
-.034(nodes 1 and 2 \). Node 2 therefore sends out Z as the value of the network.)A
90 330 :M
-.077(It is possible for more than one node to be matched simultaneously by the environmental input.)A
72 342 :M
2.606 .261(However, the learning algorithm assures that the network is always consistent \(without)J
72 354 :M
.823 .082(contradictions\). Thus, any nodes simultaneously matched by the environment have consistent)J
72 366 :M
.095(outputs.)A
90 378 :M
.378 .038(The input is )J
f2_12 sf
.719 .072(A'B'C'D'. )J
f0_12 sf
.473 .047(Node 1 is not matched because A is currently low. Therefore, node)J
72 390 :M
.261 .026(1 does nothing. Similarly, node 2 is not matched because A \(and/or B\) is currently low. It does)J
72 402 :M
.028 .003(nothing. In this case, none of the nodes in the network matches the input. The network outputs a)J
72 414 :M
f1_12 sf
.426 .043(don't know)J
f0_12 sf
.271 .027(, indicating that it does not know what the output should be. Along with this, several)J
72 426 :M
-.02(options exist for generalizing on the input in order to come up with a "best guess" answer, such as)A
72 438 :M
-.081(nearest-match, stochastic selection, etc.)A
72 463 :M
-.052(3.2. Learning Mode: Changing The Network Function)A
90 481 :M
.633 .063(The goal of learning is to store a consistent, efficient representation of instances \(including)J
72 493 :M
.179 .018(generalization\) which have been presented [7]. This includes changing the network function. In)J
72 505 :M
-.079(learning mode, both the input and the desired network output \(together called the )A
f1_12 sf
-.085(new instance)A
f0_12 sf
-.089(\) are)A
72 517 :M
2.2 .22(broadcast to the nodes. As in execution mode, the operation of the network is parallel,)J
72 529 :M
1.69 .169(asynchronous, and the nodes are independent. Each node receives a broadcast of the new)J
72 541 :M
-.063(instance, compares its stored instance with the new instance, and may change its function based on)A
72 553 :M
.284 .028(the result. A node may change its instance, may delete itself from the network, or not change at)J
72 565 :M
(all.)S
90 577 :M
.165 .016(Following are four limited examples of how learning takes place. For an in-depth discussion)J
72 589 :M
-.008(of LIA learning see [7]. Assume the initial network state in figure 1. The addition of each of the)A
72 601 :M
-.092(following four instances in order illustrates how a network operates in learning mode:)A
94 616 :M
.585 .059(\(1\) A'B'C => Z)J
94 628 :M
.609 .061(\(2\) A' => Z')J
94 640 :M
.927 .093(\(3\) A'B' => Z')J
94 652 :M
.454 .045(\(4\) AB'CD' => Z)J
90 667 :M
f2_12 sf
.569 .057(A'B'C => Z.)J
f0_12 sf
.487 .049( Because their respective A's are opposite from the new instance, nodes 1 and)J
72 679 :M
.77 .077(2 do not cover the new instance, and are not contradicted by it. Thus, neither node makes any)J
72 691 :M
-.019(changes. However, a new node representing the new instance must be added to the network. The)A
72 703 :M
.042 .004(new network is shown in figure 2.)J
endp
%%Page: 4 4
%%BeginPageSetup
initializepage
(Tony; page: 4 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(4)S
2 lw
132 72 347 65 rC
190 75 232 59 rS
10 156 204 188 106 @k
150 107 -1 1 180 106 1 150 106 @a
10 156 204 454 103 @k
422 104 -1 1 446 103 1 422 103 @a
136 97 :M
f2_12 sf
.332(Input)A
434 94 :M
.133(Output)A
210 96 :M
f0_12 sf
(\(1\))S
1 lw
198.5 104.5 65 20 rS
229 118 :M
f3_12 sf
S
208 119 :M
f0_12 sf
.383 .038(AB' Z')J
279 97 :M
(\(2\))S
273.5 104.5 65 20 rS
302 118 :M
f3_12 sf
S
284 119 :M
f0_12 sf
-.125(AB Z)A
349.5 104.5 65 20 rS
357 97 :M
(\(3\))S
389 118 :M
f3_12 sf
S
354 119 :M
f0_12 sf
.378 .038(A'B'C Z)J
gR
gS 30 31 552 730 rC
194 149 :M
f0_12 sf
.127 .013(Figure 2. Network After Instance A'B'C => Z)J
72 164 :M
f2_12 sf
.989 .099(A' => Z'.)J
f0_12 sf
.904 .09( Nodes 1 and 2 do not cover the new instance, nor are contradicted by it. Node 3,)J
72 176 :M
.642 .064(however, is completely contradicted. It specifies that when A and B are low and C is high, Z)J
72 188 :M
.005 .001(must be high. The new instance specifies that anytime A is low \(regardless of B or C\), Z must be)J
72 200 :M
.084 .008(low. In this case, node 3 self-deletes from the network, and a new node 3 is added that stores the)J
72 212 :M
-.011(new instance. The new network is shown in figure 3.)A
2 lw
132 221 347 65 rC
190 224 232 59 rS
210 242 :M
(\(1\))S
1 lw
198.5 250.5 65 20 rS
227 265 :M
f3_12 sf
S
208 265 :M
f0_12 sf
.455 .046(AB' Z')J
307 265 :M
f3_12 sf
S
289 265 :M
f0_12 sf
-.142(AB Z)A
279 242 :M
(\(2\))S
272.5 250.5 65 20 rS
10 156 204 188 255 @k
150 256 -1 1 180 255 1 150 255 @a
10 156 204 454 252 @k
422 253 -1 1 446 252 1 422 252 @a
136 246 :M
f2_12 sf
.332(Input)A
434 243 :M
.133(Output)A
346.5 250.5 65 20 rS
356 242 :M
f0_12 sf
(\(3\))S
369 264 :M
f3_12 sf
S
355 264 :M
f0_12 sf
.39 .039(A' Z')J
gR
gS 30 31 552 730 rC
202 298 :M
f0_12 sf
.129 .013(Figure 3. Network After Instance A' => Z')J
90 313 :M
f2_12 sf
.791 .079(A'B' => Z'.)J
f0_12 sf
.645 .065( Nodes 1 and 2 do not cover the new instance, nor are they contradicted by it.)J
72 325 :M
-.04(Node 3, on the other hand, covers the new instance--the network already sets "Z low" for all states)A
72 337 :M
-.049(that match the new instance. Therefore, the new instance is not added. The network remains as in)A
72 349 :M
.196 .02(figure 3.)J
90 361 :M
f2_12 sf
.882 .088(AB'CD' => Z.)J
f0_12 sf
.611 .061( Nodes 2 and 3 do not cover the new instance, nor are they contradicted by)J
72 373 :M
.229 .023(it. Node 1, however, is partially contradicted. Node 1 specifies that any time A is high and B is)J
72 385 :M
.124 .012(low, Z must be low, regardless of the value of C or D. The new instance specifies that whenever)J
72 397 :M
-.043(A and C are high, and B and D are low, Z must be high.)A
90 409 :M
.465 .047(The network must be modified to represent correctly the new instance, while preserving the)J
72 421 :M
-.043(non-contradicted information in node 1. In this case, the instance in node 1 is modified to become)A
72 433 :M
.339 .034(two instances: AB'C' => Z' and AB'D => Z'. \(These two instances cover the information in the)J
72 445 :M
.722 .072(instance in node 1 that is not explicitly contradicted by the new instance.\) Node 1 initiates the)J
72 457 :M
.79 .079(addition of two nodes to the network, each of which stores one of the instances. Node 1 then)J
72 469 :M
-.019(deletes itself from the network. The new instance must now be added to the network. Since node)A
72 481 :M
-.02(1 is free, node one stores the new instance. Figure 4 shows the final network.)A
2 lw
129 487 354 88 rC
175 490 247 82 rS
195 503 :M
(\(1\))S
278 504 :M
(\(2\))S
306 524 :M
f3_12 sf
S
288 524 :M
f0_12 sf
-.142(AB Z)A
1 lw
271.5 510.5 65 20 rS
10 156 204 173 522 @k
135 523 -1 1 165 522 1 135 522 @a
10 156 204 455 529 @k
423 530 -1 1 447 529 1 423 529 @a
133 510 :M
f2_12 sf
.332(Input)A
438 519 :M
.133(Output)A
353 503 :M
f0_12 sf
(\(3\))S
345.5 510.5 65 20 rS
371 524 :M
f3_12 sf
S
357 524 :M
f0_12 sf
.39 .039(A' Z')J
183.5 510.5 74 20 rS
189 525 :M
.28 .028(AB'CD' Z)J
231 524 :M
f3_12 sf
S
190.5 546.5 68 17 rS
266 559 :M
f0_12 sf
(\(4\))S
194 559 :M
.628 .063(AB'C' Z')J
228 560 :M
f3_12 sf
S
305.5 546.5 73 17 rS
383 560 :M
f0_12 sf
(\(5\))S
314 560 :M
.285 .028(AB'D Z')J
346 559 :M
f3_12 sf
.144 .014J
gR
gS 30 31 552 730 rC
190 588 :M
f0_12 sf
.072 .007(Figure 4. Network After Instance AB'CD' => Z)J
72 623 :M
-.109(4. LOCATION INDEPENDENCE)A
90 644 :M
.3 .03(The fact that the operation \(in either mode\) of each node is independent of any other node is)J
72 656 :M
.723 .072(central to the strengths of the LIA model. )J
f1_12 sf
1.655 .165(Node independence)J
f0_12 sf
1.001 .1( exists in two ways--functional)J
72 668 :M
1.286 .129(independence and location independence. )J
f1_12 sf
2.095 .209(Functional independence)J
f0_12 sf
1.04 .104( means that the function)J
72 680 :M
1.46 .146(computed by a particular node is independent of the function computed by any other node.)J
72 692 :M
f1_12 sf
3.012 .301(Location independence)J
f0_12 sf
1.367 .137( means that the physical position of a node in the network does not)J
72 704 :M
.071 .007(determine the node's function nor affect the network's overall function. Functional independence)J
72 716 :M
.307 .031(exists because each node stores a complete instance. All of the information about an instance is)J
endp
%%Page: 5 5
%%BeginPageSetup
initializepage
(Tony; page: 5 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(5)S
72 81 :M
-.13(locally available at a single node. Functional independence leads to location independence.)A
90 93 :M
-.153(Location independence in the logical model has important implications for implementation. The)A
72 105 :M
.607 .061(physical position of a node \(or an instance\) does not matter, therefore nodes \(instances\) can be)J
72 117 :M
1.07 .107(shuffled easily around the network. No particular logical topology is required by the model,)J
72 129 :M
.845 .085(therefore any desired physical topology may be used. )J
f1_12 sf
.936 .094( Freed from functional \(and locational\))J
72 141 :M
-.079(constraints, the logical topology may be designed to accommodate the constraints and needs of the)A
72 153 :M
(physical topology.)S
72 185 :M
f0_12 sf
-.115(5. PHYSICAL TOPOLOGY)A
90 203 :M
-.076(The physical topology must support parallel broadcast and parallel gather operations. A binary)A
72 215 :M
.359 .036(tree topology is natural since it a\) has fast [)J
f1_12 sf
.125(O\(log\(n\))A
f0_12 sf
.325 .033(, where )J
f1_12 sf
.143(n)A
f0_12 sf
.342 .034( is the number of nodes in the net])J
72 227 :M
.231 .023(broadcast and gather times, b\) has a regular topology that scales well, and c\) has a simple, parsi-)J
72 239 :M
1.614 .161(monious form. This section illustrates how learning occurs, and then gives an example of)J
72 251 :M
-.07(operation in execution mode. A more in-depth discussion occurs in [7].)A
90 263 :M
f1_12 sf
.795 .079(Learning Example:)J
f0_12 sf
.4 .04( Figure 5 shows the network from the previous learning example in tree)J
72 275 :M
.494 .049(form. Free nodes beyond the frontier of the network exist but are not shown. Figure 5a shows)J
72 287 :M
-.096(the same initial network as before. Again, the four instances to be added are the following:)A
94 302 :M
.585 .059(\(1\) A'B'C => Z)J
94 314 :M
.609 .061(\(2\) A' => Z')J
94 326 :M
.927 .093(\(3\) A'B' => Z')J
94 338 :M
.454 .045(\(4\) AB'CD' => Z)J
90 353 :M
f2_12 sf
.788 .079(A'B'C => Z.)J
f0_12 sf
.636 .064( The new instance is broadcast to the network. The broadcast is received by)J
72 365 :M
1.157 .116(the root node, which then sends the broadcast on to node 2, and performs its comparison as)J
72 377 :M
-.011(indicated earlier. Node 2 receives the broadcast and performs its comparison as indicated earlier.)A
72 389 :M
.203 .02(Assume that the broadcast terminates at a node when all the node's children are free. As neither)J
72 401 :M
.947 .095(node matches the instance, neither node needs to change, and therefore, neither node sends a)J
72 413 :M
.128(response.)A
90 425 :M
.728 .073( When no response is received, the new instance is rebroadcast to the network with a flag)J
72 437 :M
.467 .047(indicating that it must be added. A single node is uniquely selected \(shortest path to free node,)J
72 449 :M
.533 .053(etc.\) and allocated to store the new instance \(figure 5b\). When this is complete, the network is)J
72 461 :M
-.112(ready to learn the next instance.)A
90 473 :M
f2_12 sf
.079 .008(A' => Z'.)J
f0_12 sf
.084 .008( When node 3 receives the broadcast, it self-deletes \(marks itself as free\) because it)J
72 485 :M
-.074(is completely contradicted. Since none of the nodes cover the new instance, the new instance must)A
72 497 :M
-.045(be added. Since node 3 is free, it stores the new instance \(figure 5c\).)A
90 509 :M
f2_12 sf
.484 .048(A'B' => Z'.)J
f0_12 sf
.434 .043( When node 3 receives this broadcast, it sends a response back to its parent \(the)J
72 521 :M
-.073(root\) indicating that it --or at least one node it its subtrees--covers the new instance. \(If at least one)A
72 533 :M
.16 .016(node covers a new instance, exactly which one, or how many are immaterial.\) None of the other)J
72 545 :M
.157 .016(nodes respond, so the root node passes the response it received from node 3 out of the network.)J
72 557 :M
.173 .017(The new instance is disregarded, and no changes are made to the network. The network is ready)J
72 569 :M
-.078(for the next instance \(figure 5c\).)A
90 581 :M
f2_12 sf
.463 .046(AB'C'D => Z.)J
f0_12 sf
.353 .035( Node 1 is partially contradicted by the new instance, and calculates the new)J
72 593 :M
.164 .016(instances that need to be added. None of the other nodes cover, nor are contradicted by, the new)J
72 605 :M
-.039(instance.)A
90 617 :M
.557 .056(Node 1 initiates the addition of two nodes to the network: one node stores AB'C' => Z' and)J
72 629 :M
.272 .027(the other node stores AB'D => Z'. Node 1 sends AB'C => Z to its left child, and AB'D' => Z' to)J
72 641 :M
.125 .013(its right child \(nodes 2 and 3\) respectively. \(If there were more, the instances would be split into)J
72 653 :M
-.011(two groups and sent to the children.\) Node 2 is allocated already, but its left child is free. The left)A
72 665 :M
.144 .014(child stores the instance AB'C => Z. Node 3 is allocated already, but its left child is free. Node)J
72 677 :M
.291 .029(3's left child stores AB'D' => Z'. Node 1 then deletes itself from the network, but when the new)J
72 689 :M
-.077(instance is added, node 1 is allocated to store the new instance \(figure 5d\).)A
90 701 :M
1.002 .1(Two operations, parallel allocation of new nodes and deletion deserve particular mention.)J
72 713 :M
-.052(First, parallel allocation of new nodes can take place without contention because different subtrees)A
endp
%%Page: 6 6
%%BeginPageSetup
initializepage
(Tony; page: 6 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(6)S
72 81 :M
-.044(are unique. When a node determines that more nodes \(instances\) need to be added to the network,)A
72 93 :M
.175 .018(all of the information about those instances is calculated by that node. The node then broadcasts)J
72 105 :M
-.074(that information to the children in its subtrees. If a particular subtree is full, the information can be)A
72 117 :M
1.048 .105(passed up to the parent and over to another subtree, until sufficient free nodes are found and)J
72 129 :M
-.04(allocated. Second, when a node deletes from the network it marks itself as "free". The same node)A
72 141 :M
.432 .043(\(at some later time\) may be allocated to store another instance. In particular, "holes" which are)J
72 153 :M
.907 .091(created in the tree by deletions may be filled by other instances in such a way that the tree is)J
72 165 :M
.82 .082(compacted \(made full and balanced\). This improves the speed of learning and execution, and)J
72 177 :M
-.057(makes efficient use of nodes.)A
95 183 421 333 rC
340 418 :M
-.125(AB Z)A
331 481 :M
.628 .063(AB'C' Z')J
365 481 :M
f3_12 sf
S
2 lw
98 225 195 89 rS
1 lw
181.5 244.5 69 22 rS
154 245 :M
f0_12 sf
(\(1\))S
212 258 :M
f3_12 sf
S
122.5 287.5 62 22 rS
133 276 :M
f0_12 sf
(\(2\))S
150 302 :M
f3_12 sf
S
-1 -1 164 287 1 1 209 267 @b
185 337 :M
f2_12 sf
1.007(\(a\))A
10 -114 -66 194 243 @k
-1 -1 194 236 1 1 193 186 @b
10 66 114 230 187 @k
-1 -1 230 245 1 1 229 195 @b
124 199 :M
.418(broadcast)A
242 203 :M
.337(gather)A
126 214 :M
.415(input)A
243 216 :M
.4(output)A
190 259 :M
f0_12 sf
.383 .038(AB' Z')J
132 302 :M
-.125(AB Z)A
469 296 :M
f3_12 sf
S
2 lw
304 225 198 89 rS
1 lw
429.5 282.5 66 22 rS
471 274 :M
f0_12 sf
(\(3\))S
372.5 239.5 69 22 rS
345 240 :M
(\(1\))S
403 253 :M
f3_12 sf
S
322.5 282.5 62 22 rS
318 271 :M
f0_12 sf
(\(2\))S
350 298 :M
f3_12 sf
S
-1 -1 371 282 1 1 409 262 @b
410 263 -1 1 451 281 1 410 262 @a
387 335 :M
f2_12 sf
1.171(\(b\))A
381 253 :M
f0_12 sf
.383 .038(AB' Z')J
333 299 :M
-.125(AB Z)A
436 297 :M
.378 .038(A'B'C Z)J
467 415 :M
f3_12 sf
S
2 lw
315 347 198 146 rS
1 lw
379.5 360.5 74 22 rS
352 361 :M
f0_12 sf
(\(1\))S
358 418 :M
f3_12 sf
S
405 510 :M
f2_12 sf
1.171(\(d\))A
440.5 402.5 63 22 rS
479 394 :M
f0_12 sf
(\(3\))S
329.5 403.5 62 22 rS
325 392 :M
(\(2\))S
-1 -1 377 403 1 1 416 383 @b
418 383 -1 1 459 402 1 418 382 @a
326.5 467.5 67 18 rS
318 456 :M
(\(4\))S
461 459 :M
(\(5\))S
411.5 468.5 70 16 rS
417 481 :M
.285 .028(AB'D Z')J
447 480 :M
f3_12 sf
S
-1 -1 344 467 1 1 367 426 @b
-1 -1 425 468 1 1 468 424 @b
455 415 :M
f0_12 sf
.39 .039(A' Z')J
2 lw
98 347 198 89 rS
1 lw
223.5 402.5 63 22 rS
262 394 :M
(\(3\))S
162.5 360.5 69 22 rS
135 361 :M
(\(1\))S
193 375 :M
f3_12 sf
S
112.5 403.5 62 22 rS
108 392 :M
f0_12 sf
(\(2\))S
140 418 :M
f3_12 sf
S
-1 -1 161 404 1 1 201 382 @b
200 384 -1 1 241 401 1 200 383 @a
186 457 :M
f2_12 sf
1.344(\(c\))A
248 415 :M
f3_12 sf
S
172 376 :M
f0_12 sf
.383 .038(AB' Z')J
122 418 :M
-.125(AB Z)A
235 416 :M
.39 .039(A' Z')J
385 375 :M
.28 .028(AB'CD' Z)J
427 375 :M
f3_12 sf
S
gR
gS 30 31 552 730 rC
177 528 :M
f0_12 sf
-.095(Figure 5. Example LIA Network With Tree Topology)A
90 546 :M
f1_12 sf
.559 .056(Execution Example:)J
f2_12 sf
( )S
f0_12 sf
.264 .026( Assume that the network in figure 5d is in execution mode, and the)J
72 558 :M
.893 .089(environmental input is )J
f2_12 sf
.289(AB'CD')A
f0_12 sf
.627 .063(. Node 1 receives the broadcast and sends it to nodes 2 and 3.)J
72 570 :M
.709 .071(Nodes 2 and 3 send the broadcast on to nodes 4 and 5. Nodes 1, 2, 3 and 4 do not match this)J
72 582 :M
.58 .058(input, but node 5 matches. Node 5 sends "Z high" to node 3. Node 3 combines "Z high" with)J
72 594 :M
.546 .055("no response" and sends "Z high" to node 1. Node 4 has no response, so node 2 combines "no)J
72 606 :M
.757 .076(response" with "no response" and does nothing further. Node 1 combines "no response" from)J
72 618 :M
-.033(node 2 with "Z high" from node 3, and sends "Z high" out as the output of the network.)A
72 650 :M
.207 .021(6. CONCLUSION)J
90 671 :M
.342 .034(The LIA model uses a static binary tree topology for a network of independent nodes. Each)J
72 683 :M
-.026(node stores a complete instance, and instances may be shuffled easily among nodes. This leads to)A
72 695 :M
-.124(an efficient physical realization using current technology.)A
90 707 :M
.51 .051(LIA uses an m-input logic gate, rather than the 2-input logic gate used in previous models.)J
endp
%%Page: 7 7
%%BeginPageSetup
initializepage
(Tony; page: 7 of 7)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
282 48 :M
f0_12 sf
(7)S
72 81 :M
.23 .023(Thus, an LIA node is slightly more complex than nodes in previous models. Also, because each)J
72 93 :M
.44 .044(node stores a complete instance, LIA's internal representation of instance sets is less distributed)J
72 105 :M
-.012(than representations in previous ASOCS models [7]. However, the main strength of LIA is that it)A
72 117 :M
-.13(is efficiently implementable using current technology.)A
90 129 :M
.841 .084(Current research for LIA \(and for ASOCS\) focusses on using LIA in solving applications,)J
72 141 :M
.177 .018(extending the overall applicability of ASOCS, improving generalization abilities, and continuing)J
72 153 :M
-.131(development of a hardware implementation.)A
72 185 :M
-.037(REFERENCES)A
72 203 :M
1.273 .127([1] Chang J., and J.J. Vidal, Inferencing in Hardware. )J
f1_12 sf
1.802 .18(Proceedings of the MCC University)J
89 215 :M
.695 .069(Research Symposium)J
f0_12 sf
.368 .037(, Austin, TX, 1987.)J
72 228 :M
1.707 .171([2] Martinez, T.R., )J
f1_12 sf
2.621 .262(Adaptive Self-organizing Logic Networks)J
f0_12 sf
2.23 .223(. UCLA Ph.D. Dissertation,)J
89 240 :M
.169(\(1986\).)A
72 255 :M
.564 .056([3] Martinez, T.R., J.J. Vidal, Adaptive Parallel Logic Networks. )J
f1_12 sf
.605 .06(Journal of Parallel and Dis-)J
89 267 :M
.454 .045(tributed Computing, Vol. 5, #1)J
f0_12 sf
.391 .039(, pp. 26-58, 1988.)J
72 282 :M
-.023([4] Martinez, T.R., Adaptive Self-Organizing Concurrent Systems. )A
f1_12 sf
-.024(Progress in Neural Networks)A
f0_12 sf
(,)S
89 294 :M
.366 .037(pp. 105-126, Ablex Publishing, 1990.)J
72 309 :M
.387 .039([5] Martinez, T.R., D.M. Campbell, A Self-Adjusting Dynamic Logic Module. To appear in the)J
89 321 :M
f1_12 sf
-.17(Journal of Parallel and Distributed Processing)A
f0_12 sf
-.19(, 1991.)A
72 336 :M
.302 .03([6] Rudolph G., and T.R. Martinez, DNA: Towards an Implementation of ASOCS. )J
f1_12 sf
.088(Proceedings)A
89 348 :M
.272 .027(of the IASTED International Symposium on Expert Systems and Neural Networks)J
f0_12 sf
.216 .022(, pp. 12-15,)J
89 360 :M
.25(1989.)A
72 375 :M
.191 .019([7] Rudolph, G., )J
f1_12 sf
.323 .032(A Location-Independent ASOCS Model.)J
f0_12 sf
.233 .023( BYU Master's Thesis, January 1991.)J
72 390 :M
.729 .073([8] Rumelhart, D., J. McClelland, et al., )J
f1_12 sf
1.044 .104(Parallel Distributed Processing: Explorations in the)J
89 402 :M
.091 .009(Microstructure of Cognition, Vol. 1)J
f0_12 sf
.076 .008(, MIT Press, 1986.)J
endp
%%Trailer
end
%%EOF