%!PS-Adobe-3.0
%%Title: (CGITCM)
%%Creator: (Microsoft Word: LaserWriter 8 8.2)
%%CreationDate: (1:24 PM Thursday, July 13, 1995)
%%For: (Tony)
%%Pages: 10
%%DocumentFonts: Times-Roman Times-Italic Times-Bold Times-BoldItalic Symbol
%%DocumentNeededFonts: Times-Roman Times-Italic Times-Bold Times-BoldItalic Symbol
%%DocumentSuppliedFonts:
%%DocumentData: Clean7Bit
%%PageOrder: Ascend
%%Orientation: Portrait
%%DocumentMedia: Default 612 792 0 () ()
%ADO_ImageableArea: 30 31 582 761
%%EndComments
% Embed the job's DSC header metadata in userdict for later use by the
% driver/feature code.
userdict begin/dscInfo 5 dict dup begin
/Title(CGITCM)def
/Creator(Microsoft Word: LaserWriter 8 8.2)def
/CreationDate(1:24 PM Thursday, July 13, 1995)def
/For(Tony)def
% Fixed: the page count here was 1, inconsistent with the %%Pages: 10 DSC
% comment and the per-page "page: N of 10" job strings emitted below.
/Pages 10 def
end def end
% md: the driver's working dictionary (157 slots).  On Level 2 interpreters
% the current packing mode is saved so it can be restored at end of prolog.
/md 157 dict def md begin/currentpacking where {pop /sc_oldpacking currentpacking def true setpacking}if
%%BeginFile: adobe_psp_basic
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% Core shorthands used throughout the prolog:
%   bd  = bind def          xs = exch store
%   xdf = exch def          ld = load def
%   Z   = define-as-zero (placeholder for variables stored into later)
/bd{bind def}bd
/xdf{exch def}bd
/xs{exch store}bd
/ld{load def}bd
/Z{0 def}bd
% The 14 name/value pairs below are turned into aliases by the
% "14{ld}repeat" that follows (e.g. /:M becomes moveto, /gS gsave).
/T/true
/F/false
/:L/lineto
/lw/setlinewidth
/:M/moveto
/rl/rlineto
/rm/rmoveto
/:C/curveto
/:T/translate
/:K/closepath
/:mf/makefont
/gS/gsave
/gR/grestore
/np/newpath
14{ld}repeat
/$m matrix def
% av: driver version tag (presumably; not read elsewhere in this file).
% por / normland: portrait vs. landscape orientation flags.
/av 81 def
/por true def
/normland false def
% psb/pse bracket embedded fragments with a VM save/restore;
% the -nosave variants are deliberate no-ops.
/psb-nosave{}bd
/pse-nosave{}bd
/us Z
/psb{/us save store}bd
/pse{us restore}bd
% level2: true when the interpreter reports LanguageLevel >= 2.
/level2
/languagelevel where
{
pop languagelevel 2 ge
}{
false
}ifelse
def
% featurecleanup: invoked as "countdictstack [ { feature code } featurecleanup".
% Executes the feature code with errors trapped (stopped), clears whatever
% operands it left (cleartomark), then pops any dictionaries the feature
% code left on the dictionary stack, so a failing feature cannot abort
% the job or unbalance the stacks.
/featurecleanup
{
stopped
cleartomark
countdictstack exch sub dup 0 gt
{
{end}repeat
}{
pop
}ifelse
}bd
% startnoload/endnoload: definitions bracketed by "flag startnoload ...
% flag endnoload" are discarded via save/restore when flag is TRUE, so
% only the variant matching the interpreter's language level survives.
/noload Z
/startnoload
{
{/noload save store}if
}bd
/endnoload
{
{noload restore}if
}bd
level2 startnoload
% Level 1 variants (these survive only when level2 is false).
/setjob
{
statusdict/jobname 3 -1 roll put
}bd
/setcopies
{
userdict/#copies 3 -1 roll put
}bd
level2 endnoload level2 not startnoload
% Level 2 variants (these survive only when level2 is true).
/setjob
{
1 dict begin/JobName xdf currentdict end setuserparams
}bd
/setcopies
{
1 dict begin/NumCopies xdf currentdict end setpagedevice
}bd
level2 not endnoload
% Per-page state: pm = page-level save object, mT = page transform matrix
% (defined in the Setup section), sD = scratch dict for the style fonts.
/pm Z
/mT Z
/sD Z
/realshowpage Z
% initializepage: save VM and apply the page transform;
% endp: undo the page save and emit the page.
/initializepage
{
/pm save store mT concat
}bd
/endp
{
pm restore showpage
}def
/$c/DeviceRGB def
% rC / rF / rS = rectclip / rectfill / rectstroke: use the Level 2
% operators when available, otherwise path-based emulations taking
% "x y width height" and building the rectangle with rlinetos.
/rectclip where
{
pop/rC/rectclip ld
}{
/rC
{
np 4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
:K
clip np
}bd
}ifelse
/rectfill where
{
pop/rF/rectfill ld
}{
/rF
{
gS
np
4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
fill
gR
}bd
}ifelse
/rectstroke where
{
pop/rS/rectstroke ld
}{
/rS
{
gS
np
4 2 roll
:M
1 index 0 rl
0 exch rl
neg 0 rl
:K
stroke
gR
}bd
}ifelse
%%EndFile
%%BeginFile: adobe_psp_colorspace_level1
%%Copyright: Copyright 1991-1993 Adobe Systems Incorporated. All Rights Reserved.
% Level 1 color shorthands: G = setgray, :F = setrgbcolor.
/G/setgray ld
/:F/setrgbcolor ld
%%EndFile
%%BeginFile: adobe_psp_basic_text
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% Text-show shorthands.  The page bodies call these with per-string
% track/word-spacing numbers for justified text:
%   S = show
%   A = ashow with 0 x-delta (per-character spacing)
%   R = widthshow keyed on space (char 32)
%   W = widthshow
%   J = awidthshow, extra width on spaces plus per-char spacing
%   V = awidthshow, per-char spacing only
/S/show ld
/A{
0.0 exch ashow
}bd
/R{
0.0 exch 32 exch widthshow
}bd
/W{
0.0 3 1 roll widthshow
}bd
/J{
0.0 32 4 2 roll 0.0 exch awidthshow
}bd
/V{
0.0 4 1 roll 0.0 exch awidthshow
}bd
% fc: emit a one-time warning on the job channel when free VM drops
% below 50000 bytes.
/fcflg true def
/fc{
fcflg{
vmstatus exch sub 50000 lt{
(%%[ Warning: Running out of memory ]%%\r)print flush/fcflg false store
}if pop
}if
}bd
% $f flips the font's y-axis to compensate for the y-flip in the page
% matrix mT ([1 0 0 -1 ...]); :ff applies it via makefont.
/$f[1 0 0 -1 0 0]def
/:ff{$f :mf}bd
% MacEncoding: StandardEncoding with quotesingle/grave fixed and the
% upper 128 slots replaced by the Macintosh character set.
/MacEncoding StandardEncoding 256 array copy def
MacEncoding 39/quotesingle put
MacEncoding 96/grave put
/Adieresis/Aring/Ccedilla/Eacute/Ntilde/Odieresis/Udieresis/aacute
/agrave/acircumflex/adieresis/atilde/aring/ccedilla/eacute/egrave
/ecircumflex/edieresis/iacute/igrave/icircumflex/idieresis/ntilde/oacute
/ograve/ocircumflex/odieresis/otilde/uacute/ugrave/ucircumflex/udieresis
/dagger/degree/cent/sterling/section/bullet/paragraph/germandbls
/registered/copyright/trademark/acute/dieresis/notequal/AE/Oslash
/infinity/plusminus/lessequal/greaterequal/yen/mu/partialdiff/summation
/product/pi/integral/ordfeminine/ordmasculine/Omega/ae/oslash
/questiondown/exclamdown/logicalnot/radical/florin/approxequal/Delta/guillemotleft
/guillemotright/ellipsis/space/Agrave/Atilde/Otilde/OE/oe
/endash/emdash/quotedblleft/quotedblright/quoteleft/quoteright/divide/lozenge
/ydieresis/Ydieresis/fraction/currency/guilsinglleft/guilsinglright/fi/fl
/daggerdbl/periodcentered/quotesinglbase/quotedblbase/perthousand
/Acircumflex/Ecircumflex/Aacute/Edieresis/Egrave/Iacute/Icircumflex/Idieresis/Igrave
/Oacute/Ocircumflex/apple/Ograve/Uacute/Ucircumflex/Ugrave/dotlessi/circumflex/tilde
/macron/breve/dotaccent/ring/cedilla/hungarumlaut/ogonek/caron
MacEncoding 128 128 getinterval astore pop
% copyfontdict: findfont and copy the font dict (minus FID on Level 1)
% leaving the copy open on the dict stack for re-encoding.
level2 startnoload
/copyfontdict
{
findfont dup length dict
begin
{
1 index/FID ne{def}{pop pop}ifelse
}forall
}bd
level2 endnoload level2 not startnoload
/copyfontdict
{
findfont dup length dict
copy
begin
}bd
level2 not endnoload
md/fontname known not{
/fontname/customfont def
}if
/Encoding Z
% :mre: re-encode a font with MacEncoding, define it, and apply the
% y-flip matrix ($f); used for each fN_1 base font in the Setup section.
/:mre
{
copyfontdict
/Encoding MacEncoding def
fontname currentdict
end
definefont :ff def
}bd
% :bsr ... pd ... :esr: begin/end a selective re-encode.  :bsr leaves the
% new Encoding array on the stack; pd (put dup) installs individual
% "code /name" overrides; :esr defines the resulting font.
/:bsr
{
copyfontdict
/Encoding Encoding 256 array copy def
Encoding dup
}bd
/pd{put dup}bd
/:esr
{
pop pop
fontname currentdict
end
definefont :ff def
}bd
% scf: "name font size scf" defines a scaled font instance.
/scf
{
scalefont def
}bd
/scf-non
{
$m scale :mf setfont
}bd
% ps: current point size, stored by fz; used by the style BuildChars.
/ps Z
/fz{/ps xs}bd
/sf/setfont ld
/cF/currentfont ld
% mbf: define a blended (multiple-master) font when the interpreter
% supports makeblendedfont, otherwise fall through.
/mbf
{
/makeblendedfont where
{
pop
makeblendedfont
/ABlend exch definefont
}{
pop
}ifelse
def
}def
%%EndFile
%%BeginFile: adobe_psp_derived_styles
%%Copyright: Copyright 1990-1993 Adobe Systems Incorporated. All Rights Reserved.
% wi: stringwidth.  On interpreter version 23.0 the measurement is done
% inside a gsave with an empty clip as a workaround (presumably for an
% old-clone stringwidth bug -- inherited from the original driver).
/wi
version(23.0)eq
{
{
gS 0 0 0 0 rC stringwidth gR
}bind
}{
/stringwidth load
}ifelse
def
% $o/gl: gray level used to knock out the interior of outline/shadow
% characters (1.0 = white).  ms = moveto + show.
/$o 1. def
/gl{$o G}bd
/ms{:M S}bd
% Condensed / extended styles: plain horizontal scaling via makefont.
/condensedmtx[.82 0 0 1 0 0]def
/:mc
{
condensedmtx :mf def
}bd
/extendedmtx[1.18 0 0 1 0 0]def
/:me
{
extendedmtx :mf def
}bd
% Scratch slots shared by the style-simulation BuildChar procedures.
/basefont Z
/basefonto Z
/dxa Z
/dxb Z
/dxc Z
/dxd Z
/dsdx2 Z
/bfproc Z
% :fbase: apply bfproc to a font and define the result as /customfont.
% For composite (FontType 0) fonts it recurses through FDepVector so
% every descendant font gets the same treatment.
/:fbase
{
dup/FontType get 0 eq{
dup length dict begin
dup{1 index/FID ne 2 index/UniqueID ne and{def}{pop pop}ifelse}forall
/FDepVector exch/FDepVector get[exch/:fbase load forall]def
}/bfproc load ifelse
/customfont currentdict end definefont
}bd
% :mo -- outline style.  Builds a PaintType 2 (stroked) copy of the base
% font, then a Type 3 wrapper whose BuildChar paints the character filled
% in gl (white) and strokes the outline copy on top.
/:mo
{
/bfproc{
dup dup length 2 add dict
begin
{
1 index/FID ne 2 index/UniqueID ne and{def}{pop pop}ifelse
}forall
/PaintType 2 def
/StrokeWidth .012 0 FontMatrix idtransform pop def
/customfont currentdict
end
definefont
8 dict begin
/basefonto xdf
/basefont xdf
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding StandardEncoding def
/BuildChar
{
exch begin
basefont setfont
( )dup 0 4 -1 roll put
dup wi
setcharwidth
0 0 :M
gS
gl
dup show
gR
basefonto setfont
show
end
}def
}store :fbase
}bd
% :mso -- simulated outline: the Type 3 BuildChar paints the character at
% four offsets of dxa (one pixel at the current point size, 1/ps) and then
% once in gl at the half-pixel center, widening the advance when nonzero.
/:mso
{
/bfproc{
7 dict begin
/basefont xdf
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding StandardEncoding def
/BuildChar
{
exch begin
sD begin
/dxa 1 ps div def
basefont setfont
( )dup 0 4 -1 roll put
dup wi
1 index 0 ne
{
exch dxa add exch
}if
setcharwidth
dup 0 0 ms
dup dxa 0 ms
dup dxa dxa ms
dup 0 dxa ms
gl
dxa 2. div dup ms
end
end
}def
}store :fbase
}bd
% :ms -- shadow style.  Like :mo it derives a PaintType 2 stroked copy,
% then a Type 3 wrapper: BuildChar paints a black offset copy (the shadow,
% displaced by dxb = .05 em), knocks out the character body in gl, and
% strokes the outline copy on top; the advance width grows by dxb.
/:ms
{
/bfproc{
dup dup length 2 add dict
begin
{
1 index/FID ne 2 index/UniqueID ne and{def}{pop pop}ifelse
}forall
/PaintType 2 def
/StrokeWidth .012 0 FontMatrix idtransform pop def
/customfont currentdict
end
definefont
8 dict begin
/basefonto xdf
/basefont xdf
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding StandardEncoding def
/BuildChar
{
exch begin
sD begin
/dxb .05 def
basefont setfont
( )dup 0 4 -1 roll put
dup wi
exch dup 0 ne
{
dxb add
}if
exch setcharwidth
dup dxb .01 add 0 ms
0 dxb :T
gS
gl
dup 0 0 ms
gR
basefonto setfont
0 0 ms
end
end
}def
}store :fbase
}bd
% :mss -- simulated shadow: combines the :mso multi-offset outline trick
% (dxc = one pixel) with the :ms shadow displacement (dsdx2).
/:mss
{
/bfproc{
7 dict begin
/basefont xdf
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding StandardEncoding def
/BuildChar
{
exch begin
sD begin
/dxc 1 ps div def
/dsdx2 .05 dxc 2 div add def
basefont setfont
( )dup 0 4 -1 roll put
dup wi
exch dup 0 ne
{
dsdx2 add
}if
exch setcharwidth
dup dsdx2 .01 add 0 ms
0 .05 dxc 2 div sub :T
dup 0 0 ms
dup dxc 0 ms
dup dxc dxc ms
dup 0 dxc ms
gl
dxc 2 div dup ms
end
end
}def
}store :fbase
}bd
% :msb -- simulated bold: BuildChar smears the character by painting it
% four times offset by dxd = .03 em, widening the advance by dxd.
/:msb
{
/bfproc{
7 dict begin
/basefont xdf
/FontType 3 def
/FontMatrix[1 0 0 1 0 0]def
/FontBBox[0 0 1 1]def
/Encoding StandardEncoding def
/BuildChar
{
exch begin
sD begin
/dxd .03 def
basefont setfont
( )dup 0 4 -1 roll put
dup wi
1 index 0 ne
{
exch dxd add exch
}if
setcharwidth
dup 0 0 ms
dup dxd 0 ms
dup dxd dxd ms
0 dxd ms
end
end
}def
}store :fbase
}bd
% :mi -- simulated italic: skew the font by ~12 degrees via makefont.
/italicmtx[1 0 -.212557 1 0 0]def
/:mi
{
italicmtx :mf def
}bd
% :v: font -> [underline-position underline-thickness], both transformed
% through the font's FontMatrix; falls back to .1/.067 when FontInfo or
% its underline entries are missing.  (Used as "/f3_1 f0_1 :v def".)
/:v
{
[exch dup/FontMatrix get exch
dup/FontInfo known
{
/FontInfo get
dup/UnderlinePosition known
{
dup/UnderlinePosition get
2 index 0
3 1 roll
transform
exch pop
}{
.1
}ifelse
3 1 roll
dup/UnderlineThickness known
{
/UnderlineThickness get
exch 0 3 1 roll
transform
exch pop
abs
}{
pop pop .067
}ifelse
}{
pop pop .1 .067
}ifelse
]
}bd
% :p: "size [pos thick] :p" scales the :v metrics by the point size into
%   $t = underline line width, $p = underline y-offset,
%   $s = stroke width for the outline-style underline (:n).
/$t Z
/$p Z
/$s Z
/:p
{
aload pop
2 index mul/$t xs
1 index mul/$p xs
.012 mul/$s xs
}bd
% :m: "len :m" draws a plain underline of the given length from the
% current point, offset by $p, $t thick.
/:m
{gS
0 $p rm
$t lw
0 rl stroke
gR
}bd
% :n: underline for outline-style text -- white core (gl) with the edges
% re-stroked at $s via strokepath, using stroke adjustment when available.
/:n
{
gS
0 $p rm
$t lw
0 rl
gS
gl
stroke
gR
strokepath
$s lw
/setstrokeadjust where{pop
currentstrokeadjust true setstrokeadjust stroke setstrokeadjust
}{
stroke
}ifelse
gR
}bd
% :o: underline for shadow-style text -- a solid stroke displaced by
% $t/2, then the :n outline underline on top.
/:o
{gS
0 $p rm
$t 2 div dup rm
$t lw
dup 0 rl
stroke
gR
:n
}bd
%%EndFile
% Restore the packing mode saved at the top of the prolog and close md.
/currentpacking where {pop sc_oldpacking setpacking}if end
%%EndProlog
%%BeginSetup
md begin
% Printer-feature setup.  Each PPD feature block is wrapped in
% "countdictstack [ { ... } featurecleanup" so a failing or unsupported
% feature cannot abort the job or unbalance the stacks.
countdictstack[{
%%BeginFeature: *ManualFeed False
level2 {1 dict dup /ManualFeed false put setpagedevice}{statusdict begin /manualfeed false store end} ifelse
%%EndFeature
}featurecleanup
countdictstack[{
%%BeginFeature: *InputSlot Upper
%%EndFeature
}featurecleanup
countdictstack[{
%%BeginFeature: *PageRegion LetterSmall
level2 {
2 dict dup /PageSize [612 792] put dup /ImagingBBox [30 31 582 761] put setpagedevice
}{
/lettersmall where {pop lettersmall} {letterR} ifelse
} ifelse
%%EndFeature
}featurecleanup
(Tony)setjob
% mT: page transform -- flip y (PostScript y-up to QuickDraw y-down) and
% move the origin to the top-left of the imageable area (30, 761).
/mT[1 0 0 -1 30 761]def
/sD 16 dict def
% Job wait-timeout: 300 seconds, set via the level-appropriate mechanism.
300 level2{1 dict dup/WaitTimeout 4 -1 roll put setuserparams}{statusdict/waittimeout 3 -1 roll put}ifelse
%%IncludeFont: Times-Roman
%%IncludeFont: Times-Italic
%%IncludeFont: Times-Bold
%%IncludeFont: Times-BoldItalic
%%IncludeFont: Symbol
% Font instances: fN_1 = MacEncoding-re-encoded base font (:mre);
% fN_SIZE = scaled instance (scf).  f3_1 is not a font: it holds the
% [underline-position underline-thickness] array from :v for Times-Roman,
% consumed by :p before each underlined heading.
/f0_1/Times-Roman
:mre
/f0_12 f0_1 12 scf
/f0_10 f0_1 10 scf
/f1_1/Times-Italic
:mre
/f1_12 f1_1 12 scf
/f1_10 f1_1 10 scf
/f2_1/Times-Bold
:mre
/f2_12 f2_1 12 scf
/f3_1 f0_1
:v def
/f4_1/Times-BoldItalic
:mre
/f4_12 f4_1 12 scf
% Symbol: selective re-encode -- only code 240 is remapped to /apple.
/f5_1/Symbol
:bsr
240/apple pd
:esr
/f5_12 f5_1 12 scf
/Courier findfont[10 0 0 -10 0 0]:mf setfont
%%EndSetup
%%Page: 1 1
%%BeginPageSetup
initializepage
(Tony; page: 1 of 10)setjob
%%EndPageSetup
gS 0 0 552 730 rC
60 14 :M
f0_10 sf
.221 .022(In Proceedings of the )J
f1_10 sf
.321 .032(International Symposium on Circuits and Systems)J
f0_10 sf
.257 .026(, pp. 706-709, 1990.)J
64 50 :M
f2_12 sf
2.534 .253(CONSISTENCY AND GENERALIZATION IN INCREMENTALLY TRAINED)J
192 62 :M
3.332 .333(CONNECTIONIST NETWORKS)J
250 86 :M
f1_12 sf
-.11(Tony Martinez)A
159 99 :M
f0_12 sf
-.084(Computer Science Dept., Brigham Young University)A
236 112 :M
.174 .017( Provo, Utah 84602)J
261 151 :M
f2_12 sf
.337(Abstract)A
78 182 :M
f0_12 sf
.33 .033(This paper discusses aspects of consistency and generalization in connectionist networks)J
60 194 :M
.12 .012(which learn through incremental training by examples or rules. Differences between training)J
60 206 :M
-.07(set learning and incremental rule or example learning are presented. Generalization, the ability)A
60 218 :M
.095 .01(to output reasonable mappings when presented with novel input patterns, is discussed in light)J
60 230 :M
1.831 .183(of the above learning methods. In particular, the contrast between )J
f1_12 sf
3.561 .356(hamming distance)J
60 242 :M
f0_12 sf
-.076(generalization and generalizing by high order combinations of )A
f1_12 sf
-.072(critical variables)A
f0_12 sf
-.082( is overviewed.)A
60 254 :M
-.115(Examples of detailed rules for an incremental learning model are presented for both consistency)A
60 266 :M
-.109(and generalization constraints.)A
256 298 :M
12 f3_1 :p
58 :m
-.119(Introduction)A
78 316 :M
1.567 .157(A basic component of neural network mechanisms is the ability to adaptively )J
f1_12 sf
.52(learn)A
60 328 :M
f0_12 sf
.007 .001(mappings [2,9]. Learning takes place as information is presented to the network. The system)J
60 340 :M
-.126(must learn the information such that it can )A
f1_12 sf
-.135(generalize)A
f0_12 sf
-.124(. Generalization is the ability for a system,)A
60 352 :M
.736 .074(when presented with input not encountered during learning, to still produce an output with)J
60 364 :M
.368 .037(good probability of being correct. The class of applications for which neural networks have)J
60 376 :M
-.084(most promise, are exactly those applications for which generalization is possible [8].)A
78 390 :M
.515 .052(There is no constraint on how information is presented to a connectionist system during)J
60 402 :M
.081 .008(learning. Two possible mechanisms are )J
f1_12 sf
.023(examples)A
f0_12 sf
.027 .003( or )J
f1_12 sf
.02(rules)A
f0_12 sf
.073 .007(. The distinction between rules and)J
60 414 :M
-.05(examples can be quite fine. Assume a conjunction of boolean inputs with subsequent boolean)A
60 426 :M
-.096(outputs:)A
240 438 :M
.331 .033(A B' C D => X' Y)J
60 454 :M
.364 .036(where W' is the negation of W. Is this an example or a rule? In fact, it could be considered)J
60 466 :M
.453 .045(either. One potential differentiation between rules and examples is that rules may contain a)J
60 478 :M
-.069(smaller subset of the total possible inputs than an example. Assume an input space of the three)A
60 490 :M
-.067(boolean variables {A, B, C} and a single output Z. Assume the following examples:)A
256 506 :M
-.06(A B C => Z)A
254 520 :M
.261 .026(A B' C => Z)J
254 534 :M
.261 .026(A B C' => Z)J
252 548 :M
.617 .062(A B' C' => Z)J
78 564 :M
1.5 .15(Note the obvious correlation between A and Z. In each case Z is high if A is high)J
60 576 :M
-.054(regardless of the setting of the other variables. A rule representing this knowledge could be A)A
60 588 :M
.545 .054(=> Z. In this case the variables B and C are considered as )J
f1_12 sf
.932 .093(don't care)J
f0_12 sf
.799 .08( variables, whereas A)J
60 600 :M
.384 .038(could be considered as a )J
f1_12 sf
.104(critical)A
f0_12 sf
.403 .04( variable. A rule is typically more general than an example)J
60 612 :M
.107 .011(because it contains less variables. One mechanism of generalization in learning systems is to)J
60 624 :M
-.13(manipulate examples into more general rules.)A
78 638 :M
-.119(Two basic mechanisms for doing generalization are )A
f1_12 sf
-.127(hamming distance )A
f0_12 sf
-.137(and)A
f1_12 sf
-.107( critical variables)A
f0_12 sf
(.)S
60 650 :M
.006 .001(In a hamming distance mechanism, the system seeks to match the input to learned prototypes,)J
60 662 :M
.817 .082(matching with the prototype with which it has the least number of total mismatches. This)J
endp
%%Page: 2 2
%%BeginPageSetup
initializepage
(Tony; page: 2 of 10)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
90 81 :M
f0_12 sf
-.115(prototype then drives the output. With critical variable generalization, the combination of a few)A
90 93 :M
-.058(variables drive the output while others are considered as don't cares.)A
108 107 :M
.851 .085(For example, assume the following examples, augmented from the example above, are)J
90 119 :M
-.075(given to a learning system.)A
286 134 :M
-.06(A B C => Z)A
284 148 :M
.261 .026(A B' C => Z)J
284 162 :M
.261 .026(A B C' => Z)J
282 176 :M
.617 .062(A B' C' => Z)J
278 190 :M
1.288 .129(A' B' C' => Z')J
90 206 :M
-.053(Let A' B C be the input to the system after learning of the above examples. Since there was no)A
90 218 :M
.951 .095(example given of A' B C the system must generalize or output a don't know. If a critical)J
90 230 :M
.015 .002(variable scheme is used, the system could use the correlation of A => Z and A' => Z' as a rule)J
90 242 :M
1.072 .107(with A as a critical variable. Thus, it would output Z'. However, if a hamming distance)J
90 254 :M
.003 0(scheme is used, then A' B C is different by only one variable from A B C => Z and by at least)J
90 266 :M
-.053(two variables from all other examples. Thus, the output would be Z.)A
108 280 :M
-.06(Note that neither of the two options can be said to be correct since we can only guess at the)A
90 292 :M
.854 .085(output of an input for which total information has not been given. Which method is most)J
90 304 :M
.794 .079(promising will depend on specific applications and research. Most current neural network)J
90 316 :M
-.046(schemes use hamming distance generalization. There is evidence that natural nervous systems)A
90 328 :M
.943 .094(have the ability to extract critical input from a large barrage of total inputs and act, while)J
90 340 :M
-.08(ignoring currently unimportant inputs.)A
108 354 :M
.028 .003(When differentiating between examples and rules, another potentially important feature is)J
90 366 :M
.661 .066(whether order of presentation is important. In the )J
f1_12 sf
.97 .097(training set)J
f0_12 sf
.79 .079( scheme, used typically with)J
90 378 :M
-.067(current example driven neural networks, all examples are equally important. The system seeks)A
90 390 :M
1.063 .106(to average out the information of the many equivalent examples to derive a classification)J
90 402 :M
1.729 .173(mapping. Rules may also be input in incremental fashion, where the order of input is)J
90 414 :M
1.658 .166(important. Consider the natural training scheme of learning general rules, followed by)J
90 426 :M
.145 .015(refinement through learning exceptions to the general rules. In this case the general rules are)J
90 438 :M
.886 .089(still a valid default, but the specific case of the exception rule has higher priority than the)J
90 450 :M
-.12(general rule. This type of learning can be labeled as )A
f1_12 sf
-.136(incremental)A
f0_12 sf
(.)S
108 464 :M
.192 .019(Incremental learning schemes have the advantage of naturally encapsulating the common)J
90 476 :M
1.468 .147(general to specific learning scheme. It also appears to be advantageous when the input)J
90 488 :M
.035 .003(examples \(or rules\) are more accurate. On the other hand, the training set scheme holds more)J
90 500 :M
.163 .016(promise when the input is noisy and has no natural priority of one example to the next. Both)J
90 512 :M
-.1(techniques have their place and hybrids may be advantageous.)A
108 526 :M
1.174 .117( A learning system using incrementally input rules can be maintained )J
f1_12 sf
.3(consistent)A
f0_12 sf
.6 .06(. By)J
90 538 :M
-.067(consistent it is meant that no two rules which can be simultaneously matched and which output)A
90 550 :M
-.086(opposite values should be in the same rule set. This means that rules must be modified in order)A
90 562 :M
.022 .002(to maintain consistency. If new rules are given precedence, then old rules which could match)J
90 574 :M
.035 .003(with the new rule and which give different output, are deleted or modified such that matching)J
90 586 :M
-.134(cannot take place.)A
108 600 :M
.038 .004(These rules can also be minimized such that the same information is represented by fewer)J
90 612 :M
-.064(rules or variables. This is also a type of generalization. For example, the deletion of don't care)A
90 624 :M
1.683 .168(variables allows only critical variables to remain in rules, thus making critical variable)J
90 636 :M
-.1(generalization possible.)A
184 655 :M
12 f3_1 :p
262 :m
-.12(Consistency and Generalization in Incremental Systems)A
108 676 :M
.858 .086(A class of new connectionist models which uses both incremental learning and critical)J
90 688 :M
.873 .087(variable generalization is ASOCS \(Adaptive Self-Organizing Concurrent Systems\) [3,4,7].)J
90 700 :M
.097 .01(ASOCS is a parallel adaptive system which functions in two modes: processing and learning.)J
endp
%%Page: 3 3
%%BeginPageSetup
initializepage
(Tony; page: 3 of 10)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
90 81 :M
f0_12 sf
.075 .007(During processing, ASOCS functions like a parallel hardware circuit mapping boolean inputs)J
90 93 :M
-.072(to boolean outputs. During learning the systems accepts if-then rules in an incremental fashion)A
90 105 :M
-.083(and reconfigures the network so as to maintain consistency. ASOCS models guarantee learning)A
90 117 :M
-.001(of arbitrary boolean mappings, and learn any rules in time )A
f1_12 sf
(O\(log\(n\)\))S
f0_12 sf
( where )S
f1_12 sf
(n)S
f0_12 sf
( is the number of)S
90 129 :M
-.057(nodes or rules in the network. There are a number of different ASOCS learning algorithms and)A
90 141 :M
-.008(systems and discussion of their mechanisms is found elsewhere [3,5,6].)A
108 158 :M
.476 .048(This paper discusses the basic knowledge input of an ASOCS system and how it is kept)J
90 170 :M
-.121(consistent at a high level, independent from a specific ASOCS implementation.)A
108 187 :M
-.049(The atomic input to the system is called an )A
f1_12 sf
-.052(instance)A
f0_12 sf
-.05(. An instance is made up of a vector of)A
90 199 :M
-.088(boolean inputs and a single boolean output. For example:)A
108 216 :M
.807 .081(A B' => Z')J
108 233 :M
.326 .033(B C D => C')J
108 250 :M
.434 .043(D E' => X)J
108 267 :M
1.336 .134(An instance specifies what the system should output if the current input matches the)J
90 279 :M
.088 .009(instance. So, for the instance D E' => X, the system must output X as high if D is high and E)J
90 291 :M
.005 .001(is low, regardless of the setting of any other input variables. This instance says nothing about)J
90 303 :M
-.038(what X should be when D is not high or E is not low.)A
108 320 :M
-.143(The vector of input variables in an instance is called a )A
f1_12 sf
-.14(variable-list)A
f0_12 sf
(.)S
108 337 :M
-.108(An instance whose output is negated is a )A
f1_12 sf
-.108(negative )A
f0_12 sf
-.112(instance. An instance with a non-negated)A
90 349 :M
.042 .004(output is a )J
f1_12 sf
.015(positive)A
f0_12 sf
.052 .005( instance. Thus, an instance can have a positive or negative )J
f1_12 sf
.016(polarity)A
f0_12 sf
.041 .004(. Two)J
90 361 :M
-.012(instances with the same polarity are )A
f1_12 sf
-.014(concordant)A
f0_12 sf
-.013(, while two instances with opposite polarity are)A
90 373 :M
f1_12 sf
-.131(discordant)A
f0_12 sf
-.122( with respect to each other.)A
108 390 :M
-.013(Instances are input incrementally. The most recent instance is given precedence, although)A
90 402 :M
-.038(that is not the only possible strategy. The current totality of instances is called the )A
f1_12 sf
-.043(instance set)A
90 414 :M
(\(IS\).)S
f0_12 sf
.027 .003( An instance set is maintained )J
f1_12 sf
.009(consistent)A
f0_12 sf
.03 .003(. In a consistent set no two discordant instances)J
90 426 :M
-.113(can simultaneously be matched.)A
108 443 :M
-.047(Consistency between any two discordant instances is assured when there exists at least one)A
90 455 :M
f1_12 sf
-.06(discriminant variable)A
f0_12 sf
-.058( for the two instances. A discriminant variable is an input variable which)A
90 467 :M
.491 .049(is negated in one of the instances and not negated in the other. Assume the following three)J
90 479 :M
(instances.)S
108 496 :M
-.043(\(1\) A B => Z)A
108 513 :M
.597 .06(\(2\) B' C => Z')J
108 530 :M
.21 .021(\(3\) A C => Z')J
108 547 :M
-.113(The first two instances are consistent since they contain the discriminant variable B. Since B)A
90 559 :M
.683 .068(can never be simultaneously high and low, these two instances can never simultaneously be)J
90 571 :M
-.019(matched. Instances 2 and 3 are consistent because they are concordant. However, instances 1)A
90 583 :M
-.113(and 3 are inconsistent because they are discordant and contain no discriminant variable.)A
108 600 :M
.208 .021(If we assume that instance 3 is the most recent instance, then the system could have been)J
90 612 :M
.031 .003(made consistent by deleting instance 1. However, that is overkill in this case. We would like)J
90 624 :M
-.084(to keep all the information from old instances except for that which is specifically contradicted.)A
90 636 :M
-.024(In this case, we need to add a discriminant variable to instance 1. By adding C' to the variable)A
90 648 :M
1.688 .169(list of instance 1, the instance set becomes consistent, while still retaining all previous)J
90 660 :M
.89 .089(information except that specifically contradicted by the new instance. This mechanism of)J
90 672 :M
-.15(maintaining consistency is called )A
f1_12 sf
-.159(discriminant variable addition \(DVA\))A
f0_12 sf
(.)S
108 689 :M
.127 .013(We now overview how an instance set is maintained consistent when a new instance \(NI\))J
90 701 :M
-.052(is introduced. All comparisons are pairwise between the NI and each old instance \(OI\). To do)A
endp
%%Page: 4 4
%%BeginPageSetup
initializepage
(Tony; page: 4 of 10)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
90 81 :M
f0_12 sf
.074 .007(this we must classify how a NI can match with an OI. This is shown by example. Assume the)J
90 93 :M
-.105(variable list \(we currently ignore polarity\) of the NI is)A
108 110 :M
.204 .02(A B' D)J
108 127 :M
-.033(Assume the following variable lists of OI's.)A
108 144 :M
(A)S
198 144 :M
(Subset)S
108 161 :M
.121(A)A
f5_12 sf
( )S
f0_12 sf
.145 .014(B' D)J
198 161 :M
-.248(Equal)A
108 178 :M
(A)S
f5_12 sf
( )S
f0_12 sf
.029 .003(B' D E)J
198 178 :M
-.045(Superset)A
108 195 :M
.751(B')A
f5_12 sf
.403 .04( )J
f0_12 sf
1.402(E')A
198 195 :M
-.274(Overlap)A
108 212 :M
.145 .014(C G)J
198 212 :M
-.274(Overlap)A
108 229 :M
.571(A)A
f5_12 sf
.18 .018( )J
f0_12 sf
.335(B')A
f5_12 sf
.18 .018( )J
f0_12 sf
.714(D')A
198 229 :M
-.248(Discriminated)A
126 246 :M
-.086(An OI is )A
f1_12 sf
-.087(subset)A
f0_12 sf
-.076( if its variable-list is a subset of the variables of the NI.)A
108 263 :M
-.125(An OI is )A
f1_12 sf
-.135(equal)A
f0_12 sf
-.076( )A
f1_12 sf
-.076( )A
f0_12 sf
-.112(if its variable-list is the same as that of the NI.)A
108 280 :M
.138 .014(An OI is )J
f1_12 sf
.275 .027(superset )J
f0_12 sf
.194 .019(if it has more variables than the NI, but every NI variable occurs in the)J
90 292 :M
.055 .005(OI's variable list.)J
108 309 :M
-.102(An OI is )A
f1_12 sf
-.103(overlap )A
f0_12 sf
-.096(if there is no discriminant variable between the NI and the OI, and it is )A
f1_12 sf
-.159(not)A
90 321 :M
f0_12 sf
.306 .031(subset, equal, or superset.)J
108 338 :M
.209 .021(An OI is )J
f1_12 sf
.094(discriminated)A
f0_12 sf
.3 .03( if it contains at least one discriminant variable \()J
f1_12 sf
.16(D)A
f0_12 sf
.308 .031( in the example)J
90 350 :M
-.091(above\) relative to the NI.)A
90 367 :M
12 f3_1 :p
161.999 :m
-.102(Consistency: Discordant Instances)A
108 384 :M
1.079 .108(Following are the modifications necessary for any OI discordant to the NI in order to)J
90 396 :M
-.046(maintain a consistent instance set. A NI is )A
f1_12 sf
-.053(broadcast)A
f0_12 sf
-.045( to all of the OI. In each case, the view is)A
90 408 :M
-.019(taken from an OI, how it matches with the NI, and what action should take place. In an actual)A
90 420 :M
-.078(ASOCS implementation, the logical modification to OI's is done in parallel in a self-organizing)A
90 432 :M
.098(network.)A
90 449 :M
-.021(I. OI Superset:)A
108 466 :M
-.11(NI: A B)A
108 483 :M
-.208(OI: A B C)A
108 500 :M
-.371(Delete OI)A
90 534 :M
-.081(II. OI Equal:)A
108 551 :M
-.11(NI: A B)A
108 568 :M
-.276(OI: A B)A
108 585 :M
-.246(Delete NI)A
90 619 :M
(III. OI Subset:)S
108 636 :M
-.083(NI: A B C)A
108 653 :M
-.276(OI: A B)A
108 670 :M
-.108(DVA on OI: \(In this case the OI becomes A B C'\))A
90 704 :M
-.163(IV. OI Overlap:)A
endp
%%Page: 5 5
%%BeginPageSetup
initializepage
(Tony; page: 5 of 10)setjob
%%EndPageSetup
-30 -31 :T
gS 30 31 552 730 rC
108 81 :M
f0_12 sf
-.083(NI: A B C)A
108 98 :M
-.237(OI1: C D)A
108 115 :M
-.283(OI2: D E)A
108 132 :M
-.12(DVA on OI \(Note that DVA can cause creation of multiple modifications. OI1 becomes A')A
90 144 :M
.058 .006(C D )J
f1_12 sf
.037(and)A
f0_12 sf
.069 .007( B' C D\).)J
90 178 :M
-.176(V. Discriminated:)A
108 195 :M
.438 .044(NI: A B')J
108 212 :M
-.208(OI: A B C)A
108 229 :M
-.04(No Change)A
90 263 :M
12 f3_1 :p
222.001 :m
-.127(Minimization: Concordant Instances \(Pairwise\))A
108 280 :M
.519 .052(An instance set is made )J
f1_12 sf
.777 .078(minimal )J
f0_12 sf
.759 .076(through deletion of redundant instances and variables.)J
90 292 :M
-.072(Complete minimality is not typically a goal due to its complexity. However, much minimizing)A
90 304 :M
.062 .006(can be done through pairwise comparison of the NI to OI's. This attains parsimony or )J
f1_12 sf
.02(partial)A
90 316 :M
f0_12 sf
-.166(minimization.)A
108 333 :M
.283 .028(Minimization aids generalization by deleting don't care variables and discovering critical)J
90 345 :M
.209 .021(variables. This does one type of generalization. The mechanism of generalization for inputs)J
90 357 :M
-.01(which do not match the minimized instance set is dependent on system implementation. Both)A
90 369 :M
2.307 .231(hamming distance or critical variable generalization can then be accomplished at the)J
90 381 :M
-.033(implementation level. This is discussed elsewhere [1,3,5].)A
108 398 :M
-.105(There is one more important matching type between concordant instances for minimization.)A
90 410 :M
-.033(Assume the NI A B C and the OI's as follows:)A
108 427 :M
.903 .09(A B')J
198 427 :M
-.097(one-difference subset)A
108 444 :M
.468 .047(A B C')J
198 444 :M
-.154(one-difference equal)A
108 461 :M
.137 .014(A B C' D)J
198 461 :M
-.102(one-difference superset)A
108 478 :M
.125 .012(Two instances are )J
f1_12 sf
.033(one-difference)A
f0_12 sf
.133 .013( if they are concordant, contain exactly one discriminant)J
90 490 :M
-.001(variable, and are otherwise subset, equal, or superset. In this case the variable is called a )A
f1_12 sf
(one-)S
90 502 :M
-.084(difference)A
f0_12 sf
-.086( variable.)A
108 519 :M
-.086(Following are types of minimization possible for different matchings of OI to NI.)A
90 536 :M
-.02(I. OI Superset:)A
108 553 :M
-.11(NI: A B)A
108 570 :M
-.208(OI: A B C)A
108 587 :M
-.371(Delete OI)A
90 621 :M
-.081(II. OI Equal:)A
108 638 :M
-.11(NI: A B)A
108 655 :M
-.276(OI: A B)A
108 672 :M
-.246(Delete NI)A
90 706 :M
(III. OI Subset:)S
endp
%%Page: 6 6
%%BeginPageSetup
% initializepage (prolog): saves VM state into /pm and applies the page
% transform mT; endp at the bottom restores it and issues showpage.
initializepage
(Tony; page: 6 of 10)setjob
%%EndPageSetup
% Undo the imageable-area origin offset (:T = translate, see prolog).
-30 -31 :T
% gS = gsave; rC presumably establishes the 552x730 clip rectangle -- rC is
% defined in a prolog section outside this chunk; TODO confirm there.
gS 30 31 552 730 rC
% Page body: each "x y :M" is a moveto (:M = moveto, see prolog); the
% kerning-prefixed (text)A / (text)J / (text)S calls are driver-generated
% show variants (defined in the prolog, not visible here) that render one
% text run with a tracking/justification adjustment.
108 81 :M
% f0_12 presumably = Times-Roman 12 (see %%DocumentFonts); sf selects it.
f0_12 sf
-.083(NI: A B C)A
108 98 :M
-.276(OI: A B)A
108 115 :M
-.246(Delete NI)A
90 149 :M
-.135(IV. OI One-Difference Subset:)A
108 166 :M
.302 .03(NI: A B' C)J
108 183 :M
-.276(OI: A B)A
108 200 :M
-.132(Rebroadcast the Modified NI without the one-difference variable)A
108 212 :M
.208 .021(\(Note here that it is never )J
% Switch to italic (f1_12) for emphasis, then back to roman (f0_12).
f1_12 sf
.077(necessary)A
f0_12 sf
.225 .023( to rebroadcast an instance to the network. It can aid)J
90 224 :M
1.059 .106(parsimony, but can increase learning time. In an actual system this is an implementation)J
90 236 :M
(decision\).)S
90 270 :M
-.182(V. OI One-Difference Equal:)A
108 287 :M
.438 .044(NI: A B')J
108 304 :M
-.276(OI: A B)A
108 321 :M
-.254(Delete OI &)A
90 338 :M
-.132(Rebroadcast NI modified by deleting the one-difference variable)A
90 372 :M
-.136(VI. OI One-Difference Superset:)A
108 389 :M
.438 .044(NI: A B')J
108 406 :M
-.208(OI: A B C)A
108 423 :M
-.17(Delete the one-difference variable from the OI &)A
108 435 :M
-.146(Optionally rebroadcast the modified OI \(see note above\))A
% Centered, underlined section heading; :p / :m presumably set up the
% underline rule of the given width -- defined outside this chunk; verify.
197 477 :M
12 f3_1 :p
235.001 :m
-.133(Consistency and Minimization in Priority ASOCS)A
108 498 :M
-.102(Another scheme for maintaining a consistent instance set is to augment each instance with a)A
90 510 :M
.397 .04(priority [1]. Assume each NI is given a priority 1 higher than previous instances. Then if a)J
90 522 :M
.379 .038(conflict ever occurs between instances, the instance with the highest priority sets the output.)J
90 534 :M
-.103(This obviates the need for DVA \(discriminant variable addition\), thus guaranteeing that the size)A
90 546 :M
-.029(of the instance set grows by at most one, when any NI is presented.)A
108 563 :M
.027 .003(Following is an overview of how OI's are modified in a priority instance system. Assume)J
90 575 :M
-.086(that the NI is always added with a higher priority unless specifically noted.)A
90 609 :M
161.999 :m
-.102(Consistency: Discordant Instances)A
90 626 :M
-.021(I. OI Superset:)A
108 643 :M
-.11(NI: A B)A
108 660 :M
-.208(OI: A B C)A
108 677 :M
-.33( Delete OI)A
% endp (prolog): pm restore + showpage -- emits the page and restores VM.
endp
%%Page: 7 7
%%BeginPageSetup
% initializepage (prolog): saves VM state into /pm and applies the page
% transform mT; endp at the bottom restores it and issues showpage.
initializepage
(Tony; page: 7 of 10)setjob
%%EndPageSetup
% Undo the imageable-area origin offset (:T = translate, see prolog).
-30 -31 :T
gS 30 31 552 730 rC
90 81 :M
f0_12 sf
-.081(II. OI Equal:)A
108 98 :M
-.11(NI: A B)A
108 115 :M
-.276(OI: A B)A
108 132 :M
-.371(Delete OI)A
90 166 :M
(III. OI Subset:)S
108 183 :M
-.083(NI: A B C)A
108 200 :M
-.276(OI: A B)A
108 217 :M
-.04(No Change)A
90 251 :M
-.163(IV. OI Overlap:)A
108 268 :M
-.083(NI: A B C)A
108 285 :M
-.237(OI1: C D)A
108 302 :M
-.283(OI2: D E)A
108 319 :M
-.04(No Change)A
90 353 :M
% Fixed typo in rendered text: "Discrminated" -> "Discriminated".
-.176(V. Discriminated:)A
108 370 :M
.438 .044(NI: A B')J
108 387 :M
-.208(OI: A B C)A
108 404 :M
-.04(No Change)A
% Underlined section heading (:p/:m defined in prolog outside this chunk).
90 438 :M
12 f3_1 :p
170.001 :m
-.16(Minimization: Concordant Instances)A
90 455 :M
-.211(Definitions:)A
108 472 :M
.394 .039(PR\(I\) - returns integer priority of the instance \(I\) for the current output variable. Higher)J
90 484 :M
-.061(number signifies higher priority.)A
108 501 :M
-.096(Cont-Greater\(OI\) - returns true)A
108 513 :M
-.074( if there exists)A
108 525 :M
-.022( {I | \(I contradicts NI\) & \(PR\(I\) > PR\(OI\)\)})A
90 537 :M
-.057( else false)A
108 554 :M
f1_12 sf
-.055(Cont-Greater)A
f0_12 sf
-.046( returns true for a specific OI-NI pair, if there exists a different OI \(OI2\) such)A
90 566 :M
-.111(that OI2 matches the NI and has priority greater than OI.)A
108 583 :M
-.086(Following are types of minimization possible for different matchings of NI to OI.)A
90 600 :M
-.02(I. OI Superset:)A
108 617 :M
-.11(NI: A B)A
108 634 :M
-.208(OI: A B C)A
108 651 :M
-.33( Delete OI)A
90 685 :M
-.081(II. OI Equal:)A
108 702 :M
-.11(NI: A B)A
% endp (prolog): pm restore + showpage -- emits the page and restores VM.
endp
%%Page: 8 8
%%BeginPageSetup
% initializepage (prolog): saves VM state into /pm and applies the page
% transform mT; endp at the bottom restores it and issues showpage.
initializepage
(Tony; page: 8 of 10)setjob
%%EndPageSetup
% Undo the imageable-area origin offset (:T = translate, see prolog).
-30 -31 :T
gS 30 31 552 730 rC
108 81 :M
f0_12 sf
-.276(OI: A B)A
108 98 :M
-.371(Delete OI)A
90 132 :M
(III. OI Subset:)S
108 149 :M
-.083(NI: A B C)A
108 166 :M
-.276(OI: A B)A
108 183 :M
% Normalized "Cont_Greater" -> "Cont-Greater" (4 occurrences on this page)
% to match the predicate as defined on page 7 ("Cont-Greater\(OI\)").
-.108(if Cont-Greater\(OI\) then Add NI)A
108 195 :M
-.057( else Delete NI)A
108 212 :M
-.079(NI: A B C D => Z)A
108 229 :M
1.263 .126(IS1: ... , A => Z', ... ,AB => Z,...,ABC => Z',...)J
108 246 :M
1.102 .11(IS2: ... , A => Z', ... ,AB => Z,...)J
108 263 :M
1.029 .103(\(In these examples ISn represent different instance sets. The left-most instances have)J
90 275 :M
.336 .034(lower priority. Typically the multiple instance sets shown give examples of each possibility)J
90 287 :M
-.073(when an )A
f1_12 sf
-.061(if-then-else)A
f0_12 sf
-.064( strategy is used for modification.\))A
90 321 :M
-.136(IV. OI One-Difference Superset:)A
108 338 :M
.438 .044(NI: A B')J
108 355 :M
-.208(OI: A B C)A
108 372 :M
-.16(Delete one-difference variable from OI & Add NI)A
108 389 :M
.254 .025(NI: A B' => Z)J
108 406 :M
1.222 .122(IS1: ... , A => Z', ... ,ABC => Z,...,AD => Z',...)J
90 440 :M
-.182(V. OI One-Difference Equal:)A
108 457 :M
.438 .044(NI: A B')J
108 474 :M
-.276(OI: A B)A
108 491 :M
-.117(if Cont-Greater\(OI\) Add NI & Remove one-difference )A
378 491 :M
-.174(variable from OI)A
108 503 :M
-.152(else Delete OI & Rebroadcast NI modified by deleting)A
126 515 :M
-.163(the one-difference variable)A
108 532 :M
.254 .025(NI: A B' => Z)J
108 549 :M
1.272 .127(IS1: ... , A => Z', ... ,AB => Z,...,AC => Z',...)J
108 566 :M
1.102 .11(IS2: ... , A => Z', ... ,AB => Z,...)J
90 600 :M
-.135(VI. OI One-Difference Subset:)A
108 617 :M
.302 .03(NI: A B' C)J
108 634 :M
-.276(OI: A B)A
108 651 :M
-.104(if Cont-Greater\(OI\) Add NI)A
126 663 :M
-.122(else Rebroadcast the Modified NI without the one-difference )A
126 675 :M
-.096(variable and then,)A
184 687 :M
-.102(if Cont-Greater\(OI\) then just add original NI)A
198 699 :M
-.128(else continue with modified broadcast)A
% endp (prolog): pm restore + showpage -- emits the page and restores VM.
endp
%%Page: 9 9
%%BeginPageSetup
% initializepage (prolog): saves VM state into /pm and applies the page
% transform mT; endp at the bottom restores it and issues showpage.
initializepage
(Tony; page: 9 of 10)setjob
%%EndPageSetup
% Undo the imageable-area origin offset (:T = translate, see prolog).
-30 -31 :T
gS 30 31 552 730 rC
108 81 :M
f0_12 sf
.205 .021(NI: A B' C => Z)J
108 98 :M
1.272 .127(IS1: ... , A => Z', ... ,AB => Z,...,AC => Z',...)J
108 115 :M
% Fixed broken arrow: "BC = > Z'" -> "BC => Z'" to match every other
% instance arrow in the document.
1.228 .123(IS2: ... , A => Z', ... ,AB => Z,...,BC => Z',...)J
108 132 :M
1.102 .11(IS3: ... , A => Z', ... ,AB => Z,...)J
% Centered, underlined section heading (:p/:m defined in prolog, not shown).
285 166 :M
12 f3_1 :p
59 :m
-.212(Simultaneity)A
108 183 :M
.35 .035(The question arises of whether the NI can be simultaneously tested against all OI's or do)J
90 195 :M
1.791 .179(different actions require an ordering. The answer is they can be done simultaneously.)J
90 207 :M
2.876 .288(However, for minimization, improved parsimony can be attained if the consistency)J
90 219 :M
-.096(modifications are done first, followed by minimization. For example:)A
108 236 :M
-.04(NI: A B C => Z)A
108 253 :M
1.263 .126(IS1: ... , A => Z', ... ,AB => Z,...,ABC => Z',...)J
108 270 :M
.48 .048(If ABC => Z' had not initially been deleted by consistency, the NI ABC => Z could not)J
90 282 :M
.255 .025(have been deleted by the OI AB => Z because cont-greater would still return true. Note that)J
90 294 :M
-.1(consistency is maintained either way.)A
108 311 :M
.02 .002(We also noted that rebroadcast of a modified instance is optional. It can lead to improved)J
90 323 :M
-.081(parsimony at the cost of greater time complexity. Assume the following NI and IS.)A
108 340 :M
.205 .021(NI: A B C' => Z)J
108 357 :M
1.204 .12(IS1: ... , A => Z', ... ,AB => Z,...,BC => Z,...)J
108 374 :M
.254 .025(NI: A B' => Z)J
108 391 :M
1.272 .127(IS1: ... , A => Z', ... ,AB => Z,...,AC => Z',...)J
108 408 :M
-.029(The NI can be minimized to A => Z by one-difference equal with AB => Z. If the A => Z)A
90 420 :M
% Fixed capitalization: "Ac => Z'" -> "AC => Z'" (refers to the instance
% AC => Z' in the IS1 shown above).
% NOTE(review): "A => Z" here may have been intended as "A => Z'" (the
% discordant instance in IS1) -- confirm against the authors' manuscript.
1.048 .105(is then rebroadcast, both AC => Z' and A => Z will be deleted. Without rebroadcast, the)J
90 432 :M
-.043(system would have remained consistent, but less parsimonious.)A
288 446 :M
54 :m
-.074(Conclusion)A
108 464 :M
-.021(This paper has discussed concepts of learning and generalization in connectionist systems.)A
90 476 :M
-.087(In particular, it has pointed out that there are a number of mechanisms for fulfilling these goals,)A
90 488 :M
-.044(each having advantages for specific classes of applications. Potential schemes for maintaining)A
90 500 :M
.496 .05(consistency and minimization for incremental systems were presented for two different rule)J
90 512 :M
.07 .007(models. Ongoing research seeks to improve speed of learning and accuracy of generalization)J
90 524 :M
-.079(in connectionist learning systems.)A
284 543 :M
62 :m
-.12(Bibliography)A
90 567 :M
1(1.)A
117 567 :M
.365 .037(Hughes, B. )J
f1_12 sf
.69 .069(Prioritized Rule Systems)J
f0_12 sf
.45 .045(, M.S. Thesis, C. S. Dept., BYU, 1989.)J
90 587 :M
1(2.)A
117 587 :M
.348 .035(Kohonen, T., )J
f1_12 sf
.638 .064(Self-organization and associative memory)J
f0_12 sf
.468 .047(, Springer Verlag, New York,)J
117 599 :M
.169(\(1984\).)A
90 619 :M
1(3.)A
117 619 :M
1.574 .157(Martinez, T. R., )J
f1_12 sf
3.218 .322(Adaptive Self-Organizing Logic Networks)J
f0_12 sf
2.688 .269(, Ph.D. Dissertation,)J
117 631 :M
1.53 .153(Technical Report - CSD 860093, University of California, Los Angeles, CA \(May)J
117 643 :M
.201(1986\).)A
90 663 :M
1(4.)A
117 663 :M
.965 .097(Martinez T. R., Models of Parallel Adaptive Logic, )J
f4_12 sf
.148 .015( )J
f1_12 sf
1.184 .118(Proceedings of the 1987 IEEE)J
117 675 :M
.057 .006(Systems Man and Cybernetics Conference)J
f0_12 sf
.047 .005(, pp. 290-296, \(October, 1987\).)J
90 695 :M
1(5.)A
117 695 :M
.029 .003(Martinez, T. R. and J. J. Vidal, Adaptive Parallel Logic Networks, )J
f1_12 sf
.045 .005(Journal of Parallel)J
117 707 :M
.436 .044(and Distributed Computing)J
f0_12 sf
.165 .017(, Vol. )J
f2_12 sf
.089(5)A
f0_12 sf
.246 .025(, No. 1, pp. 26-58, \(1988\).)J
% endp (prolog): pm restore + showpage -- emits the page and restores VM.
endp
%%Page: 10 10
%%BeginPageSetup
% initializepage (prolog): saves VM state into /pm and applies the page
% transform mT; endp at the bottom restores it and issues showpage.
initializepage
(Tony; page: 10 of 10)setjob
%%EndPageSetup
% Undo the imageable-area origin offset (:T = translate, see prolog).
-30 -31 :T
% gS = gsave; rC presumably sets the 552x730 clip rectangle (defined in a
% prolog section outside this chunk -- TODO confirm).
gS 30 31 552 730 rC
% Bibliography entries 6-9. Each entry alternates roman (f0_12) and italic
% (f1_12) runs; "x y :M" moves (:M = moveto), and the kerning-prefixed
% (text)A / (text)J calls are driver-generated show variants.
90 81 :M
f0_12 sf
1(6.)A
117 81 :M
.167 .017(Martinez, T. R., Digital Neural Networks, )J
f1_12 sf
.195 .019(Proceedings of the 1988 IEEE Systems Man)J
117 93 :M
.076 .008(and Cybernetics Conference)J
f0_12 sf
.048 .005(, pp. 681-684, \(August, 1988\).)J
90 113 :M
1(7.)A
117 113 :M
.471 .047(Martinez, T. R., Adaptive Self-Organizing Concurrent Systems, in )J
f1_12 sf
.527 .053(Progress in Neural)J
117 125 :M
.101(Networks)A
f0_12 sf
.407 .041(, Ablex Publishing, 1989.)J
90 145 :M
1(8.)A
117 145 :M
1.939 .194(Martinez, T. R., Neural Network Applicability: Classifying the Problem Space,)J
117 157 :M
f1_12 sf
1.004 .1(Proceedings of the IASTED International Symposium on Expert Systems and Neural)J
117 169 :M
.199(Networks)A
f0_12 sf
.68 .068(, pp. 41-44, August, 1989.)J
90 189 :M
1(9.)A
117 189 :M
-.003(Rumelhart, D. and McClelland, J., )A
f1_12 sf
-.003(Parallel Distributed Processing: Explorations in the)A
117 201 :M
-.005(Microstructure of Cognition)A
f0_12 sf
-.005(, Vol. I, MIT Press, \(1986\).)A
% endp (prolog): pm restore + showpage -- emits the page and restores VM.
endp
%%Trailer
end
%%EOF