@article{9362,
  abstract     = {A central goal in systems neuroscience is to understand the functions performed by neural circuits. Previous top-down models addressed this question by comparing the behaviour of an ideal model circuit, optimised to perform a given function, with neural recordings. However, this requires guessing in advance what function is being performed, which may not be possible for many neural systems. To address this, we propose an inverse reinforcement learning (RL) framework for inferring the function performed by a neural network from data. We assume that the responses of each neuron in a network are optimised so as to drive the network towards `rewarded' states, that are desirable for performing a given function. We then show how one can use inverse RL to infer the reward function optimised by the network from observing its responses. This inferred reward function can be used to predict how the neural network should adapt its dynamics to perform the same function when the external environment or network structure changes. This could lead to theoretical predictions about how neural network dynamics adapt to deal with cell death and/or varying sensory stimulus statistics.},
  author       = {Chalk, Matthew J and Tka{\v{c}}ik, Ga{\v{s}}per and Marre, Olivier},
  issn         = {1932-6203},
  journal      = {PLoS ONE},
  number       = {4},
  pages        = {e0248940},
  publisher    = {Public Library of Science},
  title        = {Inferring the function performed by a recurrent neural network},
  doi          = {10.1371/journal.pone.0248940},
  volume       = {16},
  year         = {2021},
}

@article{543,
  abstract     = {A central goal in theoretical neuroscience is to predict the response properties of sensory neurons from first principles. To this end, ``efficient coding'' posits that sensory neurons encode maximal information about their inputs given internal constraints. There exist, however, many variants of efficient coding (e.g., redundancy reduction, different formulations of predictive coding, robust coding, sparse coding, etc.), differing in their regimes of applicability, in the relevance of signals to be encoded, and in the choice of constraints. It is unclear how these types of efficient coding relate or what is expected when different coding objectives are combined. Here we present a unified framework that encompasses previously proposed efficient coding models and extends to unique regimes. We show that optimizing neural responses to encode predictive information can lead them to either correlate or decorrelate their inputs, depending on the stimulus statistics; in contrast, at low noise, efficiently encoding the past always predicts decorrelation. Later, we investigate coding of naturalistic movies and show that qualitatively different types of visual motion tuning and levels of response sparsity are predicted, depending on whether the objective is to recover the past or predict the future. Our approach promises a way to explain the observed diversity of sensory neural responses, as due to multiple functional goals and constraints fulfilled by different cell types and/or circuits.},
  author       = {Chalk, Matthew J and Marre, Olivier and Tka{\v{c}}ik, Ga{\v{s}}per},
  issn         = {0027-8424},
  journal      = {Proceedings of the National Academy of Sciences},
  number       = {1},
  pages        = {186--191},
  publisher    = {National Academy of Sciences},
  title        = {Toward a unified theory of efficient, predictive, and sparse coding},
  doi          = {10.1073/pnas.1711114115},
  volume       = {115},
  year         = {2018},
}

@article{680,
  abstract     = {In order to respond reliably to specific features of their environment, sensory neurons need to integrate multiple incoming noisy signals. Crucially, they also need to compete for the interpretation of those signals with other neurons representing similar features. The form that this competition should take depends critically on the noise corrupting these signals. In this study we show that for the type of noise commonly observed in sensory systems, whose variance scales with the mean signal, sensory neurons should selectively divide their input signals by their predictions, suppressing ambiguous cues while amplifying others. Any change in the stimulus context alters which inputs are suppressed, leading to a deep dynamic reshaping of neural receptive fields going far beyond simple surround suppression. Paradoxically, these highly variable receptive fields go alongside and are in fact required for an invariant representation of external sensory features. In addition to offering a normative account of context-dependent changes in sensory responses, perceptual inference in the presence of signal-dependent noise accounts for ubiquitous features of sensory neurons such as divisive normalization, gain control and contrast dependent temporal dynamics.},
  author       = {Chalk, Matthew J and Masset, Paul and Gutkin, Boris and Den{\`e}ve, Sophie},
  issn         = {1553-734X},
  journal      = {PLoS Computational Biology},
  number       = {6},
  pages        = {e1005582},
  publisher    = {Public Library of Science},
  title        = {Sensory noise predicts divisive reshaping of receptive fields},
  doi          = {10.1371/journal.pcbi.1005582},
  volume       = {13},
  year         = {2017},
}

@misc{9855,
  abstract     = {Includes derivation of optimal estimation algorithm, generalisation to non-{Poisson} noise statistics, correlated input noise, and implementation in a multi-layer neural network.},
  author       = {Chalk, Matthew J and Masset, Paul and Gutkin, Boris and Den{\`e}ve, Sophie},
  publisher    = {Public Library of Science},
  title        = {Supplementary appendix},
  doi          = {10.1371/journal.pcbi.1005582.s001},
  year         = {2017},
}

@inproceedings{1082,
  abstract     = {In many applications, it is desirable to extract only the relevant aspects of data. A principled way to do this is the information bottleneck (IB) method, where one seeks a code that maximises information about a relevance variable, Y, while constraining the information encoded about the original data, X. Unfortunately however, the IB method is computationally demanding when data are high-dimensional and/or non-{Gaussian}. Here we propose an approximate variational scheme for maximising a lower bound on the IB objective, analogous to variational EM. Using this method, we derive an IB algorithm to recover features that are both relevant and sparse. Finally, we demonstrate how kernelised versions of the algorithm can be used to address a broad range of problems with non-linear relation between X and Y.},
  author       = {Chalk, Matthew J and Marre, Olivier and Tka{\v{c}}ik, Ga{\v{s}}per},
  booktitle    = {Advances in Neural Information Processing Systems},
  location     = {Barcelona, Spain},
  pages        = {1965--1973},
  publisher    = {Neural Information Processing Systems},
  title        = {Relevant sparse codes with variational information bottleneck},
  volume       = {29},
  year         = {2016},
}

@article{1266,
  abstract     = {Cortical networks exhibit `global oscillations', in which neural spike times are entrained to an underlying oscillatory rhythm, but where individual neurons fire irregularly, on only a fraction of cycles. While the network dynamics underlying global oscillations have been well characterised, their function is debated. Here, we show that such global oscillations are a direct consequence of optimal efficient coding in spiking networks with synaptic delays and noise. To avoid firing unnecessary spikes, neurons need to share information about the network state. Ideally, membrane potentials should be strongly correlated and reflect a `prediction error' while the spikes themselves are uncorrelated and occur rarely. We show that the most efficient representation is when: (i) spike times are entrained to a global Gamma rhythm (implying a consistent representation of the error); but (ii) few neurons fire on each cycle (implying high efficiency), while (iii) excitation and inhibition are tightly balanced. This suggests that cortical networks exhibiting such dynamics are tuned to achieve a maximally efficient population code.},
  author       = {Chalk, Matthew J and Gutkin, Boris and Den{\`e}ve, Sophie},
  issn         = {2050-084X},
  journal      = {eLife},
  pages        = {e13824},
  publisher    = {eLife Sciences Publications},
  title        = {Neural oscillations as a signature of efficient coding in the presence of synaptic delays},
  doi          = {10.7554/eLife.13824},
  volume       = {5},
  year         = {2016},
}

