@Article{MaassETAL:01a,
  author   = {W. Maass and T. Natschl{\"a}ger and H. Markram},
  title    = {Real-time Computing Without Stable States: A New
              Framework for Neural Computation Based on
              Perturbations},
  journal  = {Neural Computation},
  volume   = {14},
  number   = {11},
  pages    = {2531--2560},
  year     = {2002},
  abstract = {A key challenge for neural modeling is to explain
              how a continuous stream of multi-modal input from a
              rapidly changing environment can be processed by
              stereotypical recurrent circuits of
              integrate-and-fire neurons in real-time. We propose
              a new framework for neural computation that provides
              an alternative to previous approaches based on
              attractor neural networks. It is shown that the
              inherent transient dynamics of the high-dimensional
              dynamical system formed by a neural circuit may
              serve as a universal source of information about
              past stimuli, from which readout neurons can extract
              particular aspects needed for diverse tasks in
              real-time. Stable internal states are not required
              for giving a stable output, since transient internal
              states can be transformed by readout neurons into
              stable target outputs due to the high dimensionality
              of the dynamical system. Our approach is based on a
              rigorous computational model, the liquid state
              machine, that unlike Turing machines, does not
              require sequential transitions between discrete
              internal states. Like the Turing machine paradigm it
              allows for universal computational power under
              idealized conditions, but for real-time processing
              of time-varying input. The resulting new framework
              for neural computation has novel implications for
              the interpretation of neural coding, for the design
              of experiments and data-analysis in neurophysiology,
              and for neuromorphic engineering.},
  url      = {papers/lsm-nc-130.pdf}
}
@InCollection{NatschlaegerETAL:02a,
  author    = {T. Natschl{\"a}ger and H. Markram and W. Maass},
  title     = {Computer Models and Analysis Tools for Neural
               Microcircuits},
  booktitle = {A Practical Guide to Neuroscience Databases and Associated
               Tools},
  publisher = {Kluwer Academic Publishers},
  address   = {Boston},
  year      = {2002},
  editor    = {R. K{\"o}tter},
  chapter   = {9},
  url       = {papers/lsm-koetter-chapter-144.pdf}
}
@InCollection{MaassETAL:02d,
  author    = {W. Maass and T. Natschl{\"a}ger and H. Markram},
  title     = {Computational Models for Generic Cortical Microcircuits},
  booktitle = {Computational Neuroscience: A Comprehensive Approach},
  publisher = {CRC Press},
  year      = {2002},
  editor    = {J. Feng},
  url       = {papers/lsm-feng-chapter-149.pdf}
}
@Unpublished{MaassETAL:02c,
  author = {W. Maass and T. Natschl{\"a}ger and H. Markram},
  title  = {A Fresh Look at Real-Time Computation in Generic Recurrent
            Neural Circuits},
  note   = {submitted for publication},
  year   = {2002},
  url    = {papers/lsm-fresh-148.pdf}
}
@InCollection{MaassMarkram:02a,
  author    = {W. Maass and H. Markram},
  title     = {Temporal Integration in Recurrent Microcircuits},
  booktitle = {The Handbook of Brain Theory and Neural Networks},
  publisher = {MIT Press},
  address   = {Cambridge},
  year      = {2002},
  editor    = {M. A. Arbib},
  edition   = {Second},
}
@Article{NatschlaegerETAL:02,
  author   = {T. Natschl{\"a}ger and W. Maass and H. Markram},
  title    = {The ``Liquid Computer'': A Novel Strategy for Real-Time
              Computing on Time Series},
  journal  = {Special Issue on Foundations of Information Processing of
              {TELEMATIK}},
  year     = {2002},
  pages    = {39--43},
  volume   = {8},
  number   = {1},
  abstract = {We will discuss in this survey article a new framework for
              analysing computations on time series and in particular on
              spike trains, introduced in (Maass et al. 2002). In
              contrast to common computational models this new framework
              does not require that information can be stored in some
              stable states of a computational system. It has recently
              been shown that such models where all events are transient
              can be successfully applied to analyse computations in
              neural systems and (independently) that the basic ideas can
              also be used to solve engineering tasks such as the design
              of nonlinear controllers. Using an illustrative example we
              will develop the main ideas of the proposed model. This
              illustrative example is generalized and cast into a
              rigorous mathematical model: the Liquid State Machine. A
              mathematical analysis shows that there are in principle no
              computational limitations of liquid state machines in the
              domain of time series computing. Finally we discuss several
              successful applications of the framework in the area of
              computational neuroscience and in the field of artificial
              neural networks.},
  url      = {papers/lsm-telematik.pdf}
}
@TechReport{Jaeger2001a,
  author      = {H. Jaeger},
  title       = {The ``echo state'' approach to analysing and training recurrent neural networks},
  institution = {German National Research Center for Information
                 Technology},
  year        = {2001},
  number      = {GMD Report 148},
  url         = {http://www.faculty.iu-bremen.de/hjaeger/pubs/EchoStatesTechRep.pdf}
}
@Article{Jaeger2004,
  author  = {H. Jaeger and H. Haas},
  title   = {Harnessing Nonlinearity: Predicting Chaotic Systems and Saving Energy in Wireless Communication},
  journal = {Science},
  year    = {2004},
  volume  = {304},
  number  = {5667},
  pages   = {78--80},
  month   = apr,
  url     = {http://www.faculty.iu-bremen.de/hjaeger/pubs/ESNScience04.pdf}
}
@Article{MaassMarkram:02,
  author  = {W. Maass and H. Markram},
  title   = {On the Computational Power of Recurrent Circuits of
             Spiking Neurons},
  journal = {Journal of Computer and System Sciences},
  year    = {2004},
  pages   = {593--616},
  volume  = {69},
  number  = {4},
  url     = {http://www.igi.tugraz.at/maass/psfiles/135-revised-elsart.pdf},
}
@TechReport{Jaeger2001b,
  author      = {H. Jaeger},
  title       = {Short term memory in echo state networks},
  institution = {German National Research Center for Information Technology},
  year        = {2001},
  number      = {GMD Report 152},
  url         = {http://www.faculty.iu-bremen.de/hjaeger/pubs/STMEchoStatesTechRep.pdf}
}
@Article{HaeuslerETAL:03,
  author  = {S. H{\"a}usler and H. Markram and W. Maass},
  title   = {Perspectives of the High Dimensional Dynamics of
             Neural Microcircuits from the Point of View of Low
             Dimensional Readouts},
  journal = {Complexity (special issue on Complex Adaptive
             Systems)},
  year    = {2003},
  pdf     = {http://www.lsm.tugraz.at/papers/haeusler_etal_2003.pdf},
  url     = {research/haeusler_etal_2003/index.html}
}
@Article{LegensteinETAL:02,
  author  = {R. A. Legenstein and H. Markram and W. Maass},
  title   = {Input Prediction and Autonomous Movement Analysis in
             Recurrent Circuits of Spiking Neurons},
  journal = {Reviews in the Neurosciences (Special Issue on Neuroinformatics of Neural and Artificial Computation)},
  year    = {2003},
  volume  = {14},
  number  = {1--2},
  pages   = {5--19},
  url     = {papers/lsm-vision-140.pdf},
}
@InProceedings{MaassETAL:02a,
  author    = {W. Maass and R. A. Legenstein and H. Markram},
  title     = {A New Approach towards Vision suggested by Biologically
               Realistic Neural Microcircuit Models},
  booktitle = {Proc. of the 2nd Workshop on Biologically Motivated
               Computer Vision},
  series    = {Lecture Notes in Computer Science},
  year      = {2002},
  month     = nov,
  publisher = {Springer},
  url       = {papers/lsm-vision-146.pdf},
}
@InProceedings{MaassETAL:02b,
  author    = {W. Maass and T. Natschl{\"a}ger and H. Markram},
  title     = {A Model for Real-Time Computation in Generic Neural
               Microcircuits},
  booktitle = {Proc. of NIPS 2002, Advances in Neural Information Processing Systems},
  volume    = {15},
  pages     = {229--236},
  publisher = {MIT Press},
  year      = {2002},
  url       = {papers/lsm-nips-147.pdf}
}
@Article{NatschlaegerMaass:2001,
  author   = {T. Natschl{\"a}ger and W. Maass},
  title    = {Spiking Neurons and the Induction of Finite State
              Machines},
  journal  = {Theoretical Computer Science: Special Issue on
              Natural Computing},
  volume   = {287},
  year     = {2002},
  pages    = {251--265},
  abstract = {We discuss in this short survey article some current
              mathematical models from neurophysiology for the
              computational units of biological neural systems: neurons
              and synapses. These models are contrasted with the
              computational units of common artificial neural network
              models, which reflect the state of knowledge in
              neurophysiology 50 years ago. We discuss the problem of
              carrying out computations in circuits consisting of
              biologically realistic computational units, focusing on the
              biologically particularly relevant case of computations on
              time series. Finite state machines are frequently used in
              computer science as models for computations on time series.
              One may argue that these models provide a reasonable common
              conceptual basis for analyzing computations in computers
              and biological neural systems, although the emphasis in
              biological neural systems is shifted more towards
              asynchronous computation on analog time series. In the
              second half of this article some new computer experiments
              and theoretical results are discussed, which address the
              question whether a biological neural system can in
              principle learn to behave like a given simple finite state
              machine.}
}
@InProceedings{Jaeger2002b,
  author    = {H. Jaeger},
  title     = {Adaptive nonlinear system identification with echo state networks},
  booktitle = {Proc. of NIPS 2002},
  year      = {2002},
  url       = {http://www.faculty.iu-bremen.de/hjaeger/pubs/esn_NIPS02}
}
@TechReport{Goldenholz2002,
  author      = {D. Goldenholz},
  title       = {Liquid Computing: A Real Effect},
  institution = {Boston University Department of Biomedical
                 Engineering},
  year        = {2002},
  url         = {papers/Goldenholz-report.pdf}
}
@Book{Kotter2002,
  editor    = {R. K{\"o}tter},
  title     = {Neuroscience Databases: A Practical Guide},
  publisher = {Kluwer Academic Publishers},
  year      = {2002},
  url       = {http://www.hirnforschung.net/neurodatbook/}
}