@article{Brunel96hebbianlearning,
  author  = {Nicolas Brunel},
  title   = {Hebbian learning of context in recurrent neural networks},
  journal = {Neural Computation},
  year    = {1996},
  volume  = {8},
  pages   = {1677--1710}
}

% not-yet
@article{citeulike:2439689,
  author   = {Berry, Hugues and Quoy, Mathias},
  title    = {Structure and Dynamics of Random Recurrent Neural Networks},
  journal  = {Adaptive Behavior},
  year     = {2006},
  volume   = {14},
  number   = {2},
  pages    = {129--137},
  month    = jun,
  doi      = {10.1177/105971230601400204},
  url      = {http://dx.doi.org/10.1177/105971230601400204},
  keywords = {dynamical-systems, dynamics, networks, neural, recurrent, structure},
  abstract = {Contrary to Hopfield-like networks, random recurrent neural networks (RRNN), where the couplings are random, exhibit complex dynamics (limit cycles, chaos). It is possible to store information in these networks through Hebbian learning. Eventually, learning "destroys" the dynamics and leads to a fixed-point attractor. We investigate here the structural changes occurring in the network through learning. We show that a simple Hebbian learning rule organizes synaptic weight redistribution in the network from an initial homogeneous and random distribution to a heterogeneous one, where strong synaptic weights preferentially assemble in triangles. Hence learning organizes the network of the large synaptic weights as a "small-world" one.}
}