[{"article_type":"original","volume":120,"month":"11","publication_identifier":{"eissn":["1091-6490"],"issn":["0027-8424"]},"quality_controlled":"1","file_date_updated":"2023-12-11T12:45:12Z","ddc":["570"],"article_processing_charge":"Yes (in subscription journal)","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","intvolume":"       120","date_created":"2023-12-10T23:01:00Z","article_number":"e2306525120","acknowledgement":"We thank Prof. C. Nazaret and Prof. J.-P. Mazat for sharing the code of their mitochondrial model. We also thank G. Miesenböck, E. Marder, L. Abbott, A. Kempf, P. Hasenhuetl, W. Podlaski, F. Zenke, E. Agnes, P. Bozelos, J. Watson, B. Confavreux, and G. Christodoulou, and the rest of the Vogels Lab for their feedback. This work was funded by Wellcome Trust and Royal Society Sir Henry Dale Research Fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and a UK Research and Innovation, Biotechnology and Biological Sciences Research Council grant (UKRI-BBSRC BB/N019512/1).","department":[{"_id":"TiVo"}],"pmid":1,"date_published":"2023-11-21T00:00:00Z","author":[{"last_name":"Chintaluri","id":"E4EDB536-3485-11EA-98D2-20AF3DDC885E","full_name":"Chintaluri, Chaitanya","first_name":"Chaitanya"},{"first_name":"Tim P","full_name":"Vogels, Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","orcid":"0000-0003-3295-6181"}],"_id":"14666","oa":1,"type":"journal_article","doi":"10.1073/pnas.2306525120","language":[{"iso":"eng"}],"year":"2023","publisher":"National Academy of Sciences","publication":"Proceedings of the National Academy of Sciences of the United States of America","title":"Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen 
species","file":[{"file_id":"14678","checksum":"bf4ec38602a70dae4338077a5a4d497f","date_created":"2023-12-11T12:45:12Z","date_updated":"2023-12-11T12:45:12Z","success":1,"file_name":"2023_PNAS_Chintaluri.pdf","file_size":16891602,"access_level":"open_access","relation":"main_file","content_type":"application/pdf","creator":"dernst"}],"external_id":{"pmid":["37988463"]},"related_material":{"link":[{"url":"https://github.com/ccluri/metabolic_spiking","relation":"software"}]},"scopus_import":"1","issue":"48","has_accepted_license":"1","citation":{"chicago":"Chintaluri, Chaitanya, and Tim P Vogels. “Metabolically Regulated Spiking Could Serve Neuronal Energy Homeostasis and Protect from Reactive Oxygen Species.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>. National Academy of Sciences, 2023. <a href=\"https://doi.org/10.1073/pnas.2306525120\">https://doi.org/10.1073/pnas.2306525120</a>.","ieee":"C. Chintaluri and T. P. Vogels, “Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species,” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 120, no. 48. National Academy of Sciences, 2023.","apa":"Chintaluri, C., &#38; Vogels, T. P. (2023). Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. National Academy of Sciences. <a href=\"https://doi.org/10.1073/pnas.2306525120\">https://doi.org/10.1073/pnas.2306525120</a>","ista":"Chintaluri C, Vogels TP. 2023. Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. Proceedings of the National Academy of Sciences of the United States of America. 120(48), e2306525120.","short":"C. Chintaluri, T.P. 
Vogels, Proceedings of the National Academy of Sciences of the United States of America 120 (2023).","mla":"Chintaluri, Chaitanya, and Tim P. Vogels. “Metabolically Regulated Spiking Could Serve Neuronal Energy Homeostasis and Protect from Reactive Oxygen Species.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 120, no. 48, e2306525120, National Academy of Sciences, 2023, doi:<a href=\"https://doi.org/10.1073/pnas.2306525120\">10.1073/pnas.2306525120</a>.","ama":"Chintaluri C, Vogels TP. Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. 2023;120(48). doi:<a href=\"https://doi.org/10.1073/pnas.2306525120\">10.1073/pnas.2306525120</a>"},"project":[{"name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","grant_number":"214316/Z/18/Z"}],"day":"21","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"oa_version":"None","publication_status":"published","abstract":[{"lang":"eng","text":"So-called spontaneous activity is a central hallmark of most nervous systems. Such non-causal firing is contrary to the tenet of spikes as a means of communication, and its purpose remains unclear. We propose that self-initiated firing can serve as a release valve to protect neurons from the toxic conditions arising in mitochondria from lower-than-baseline energy consumption. 
To demonstrate the viability of our hypothesis, we built a set of models that incorporate recent experimental results indicating homeostatic control of metabolic products—Adenosine triphosphate (ATP), adenosine diphosphate (ADP), and reactive oxygen species (ROS)—by changes in firing. We explore the relationship of metabolic cost of spiking with its effect on the temporal patterning of spikes and reproduce experimentally observed changes in intrinsic firing in the fruitfly dorsal fan-shaped body neuron in a model with ROS-modulated potassium channels. We also show that metabolic spiking homeostasis can produce indefinitely sustained avalanche dynamics in cortical circuits. Our theory can account for key features of neuronal activity observed in many studies ranging from ion channel function all the way to resting state dynamics. We finish with a set of experimental predictions that would confirm an integrated, crucial role for metabolically regulated spiking and firmly link metabolic homeostasis and neuronal function."}],"status":"public","date_updated":"2023-12-11T12:47:41Z"},{"project":[{"grant_number":"214316/Z/18/Z","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks."},{"grant_number":"819603","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"citation":{"ieee":"D. W. Jia, T. P. Vogels, and R. P. Costa, “Developmental depression-to-facilitation shift controls excitation-inhibition balance,” <i>Communications biology</i>, vol. 5. Springer Nature, 2022.","chicago":"Jia, David W., Tim P Vogels, and Rui Ponte Costa. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>. Springer Nature, 2022. 
<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>.","ama":"Jia DW, Vogels TP, Costa RP. Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications biology</i>. 2022;5. doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>","ista":"Jia DW, Vogels TP, Costa RP. 2022. Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications biology. 5, 873.","mla":"Jia, David W., et al. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>, vol. 5, 873, Springer Nature, 2022, doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>.","short":"D.W. Jia, T.P. Vogels, R.P. Costa, Communications Biology 5 (2022).","apa":"Jia, D. W., Vogels, T. P., &#38; Costa, R. P. (2022). Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications Biology</i>. Springer Nature. <a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>"},"has_accepted_license":"1","scopus_import":"1","external_id":{"isi":["000844814800007"]},"file":[{"success":1,"date_updated":"2022-09-05T08:55:11Z","file_id":"12022","checksum":"3ec724c4f6d3440028c217305e32915f","date_created":"2022-09-05T08:55:11Z","file_size":2491191,"file_name":"2022_CommBiology_Jia.pdf","content_type":"application/pdf","relation":"main_file","access_level":"open_access","creator":"dernst"}],"title":"Developmental depression-to-facilitation shift controls excitation-inhibition balance","publication":"Communications biology","date_updated":"2023-08-03T13:22:42Z","status":"public","ec_funded":1,"abstract":[{"text":"Changes in the short-term dynamics of excitatory synapses over development have been observed throughout cortex, but their purpose and consequences remain unclear. 
Here, we propose that developmental changes in synaptic dynamics buffer the effect of slow inhibitory long-term plasticity, allowing for continuously stable neural activity. Using computational modeling we demonstrate that early in development excitatory short-term depression quickly stabilises neural activity, even in the face of strong, unbalanced excitation. We introduce a model of the commonly observed developmental shift from depression to facilitation and show that neural activity remains stable throughout development, while inhibitory synaptic plasticity slowly balances excitation, consistent with experimental observations. Our model predicts changes in the input responses from phasic to phasic-and-tonic and more precise spike timings. We also observe a gradual emergence of short-lasting memory traces governed by short-term plasticity development. We conclude that the developmental depression-to-facilitation shift may control excitation-inhibition balance throughout development with important functional consequences.","lang":"eng"}],"publication_status":"published","oa_version":"Published Version","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"day":"25","acknowledgement":"We would like to thank the Vogels Lab for feedback on an earlier version of this manuscript. D.W.J. was supported by a Marshall Scholarship and a Clarendon Scholarship. R.P.C. and T.P.V. 
were supported by a Wellcome Trust and Royal Society Sir Henry Dale Fellowship (WT 100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant (SYNAPSEEK).","department":[{"_id":"TiVo"}],"date_created":"2022-09-04T22:02:02Z","article_number":"873","intvolume":"         5","article_processing_charge":"No","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","ddc":["570"],"file_date_updated":"2022-09-05T08:55:11Z","quality_controlled":"1","publication_identifier":{"eissn":["2399-3642"]},"month":"08","volume":5,"article_type":"original","isi":1,"publisher":"Springer Nature","year":"2022","language":[{"iso":"eng"}],"doi":"10.1038/s42003-022-03801-2","type":"journal_article","oa":1,"_id":"12009","author":[{"last_name":"Jia","full_name":"Jia, David W.","first_name":"David W."},{"full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","orcid":"0000-0003-3295-6181"},{"last_name":"Costa","full_name":"Costa, Rui Ponte","first_name":"Rui Ponte"}],"date_published":"2022-08-25T00:00:00Z"},{"quality_controlled":"1","publication_identifier":{"eissn":["1553-7358"]},"month":"08","volume":18,"article_type":"original","isi":1,"department":[{"_id":"TiVo"}],"acknowledgement":"We thank Friedemann Zenke for his comments, especially on the effect of the self loops on the spectrum. We also thank Ken Miller and Bill Podlaski for helpful comments. This research was funded by a Wellcome Trust and Royal Society Henry Dale Research Fellowship (WT100000; TPV), a Wellcome Senior Research Fellowship (214316/Z/18/Z; GC, EJA, and TPV), and a Research Project Grant by the Leverhulme Trust (RPG-2016-446; EJA and TPV). 
","date_created":"2022-09-11T22:01:56Z","article_number":"e1010365","intvolume":"        18","article_processing_charge":"No","ddc":["570"],"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","file_date_updated":"2022-09-12T07:47:55Z","oa":1,"_id":"12084","author":[{"first_name":"Georgia","full_name":"Christodoulou, Georgia","last_name":"Christodoulou"},{"full_name":"Vogels, Tim P","first_name":"Tim P","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels"},{"full_name":"Agnes, Everton J.","first_name":"Everton J.","last_name":"Agnes"}],"date_published":"2022-08-15T00:00:00Z","publisher":"Public Library of Science","year":"2022","language":[{"iso":"eng"}],"doi":"10.1371/journal.pcbi.1010365","type":"journal_article","external_id":{"isi":["000937227700001"]},"title":"Regimes and mechanisms of transient amplification in abstract and biological neural networks","file":[{"creator":"dernst","access_level":"open_access","content_type":"application/pdf","relation":"main_file","file_name":"2022_PLoSCompBio_Christodoulou.pdf","file_size":2867337,"file_id":"12090","date_created":"2022-09-12T07:47:55Z","checksum":"8a81ab29f837991ee0ea770817c4a50e","success":1,"date_updated":"2022-09-12T07:47:55Z"}],"publication":"PLoS Computational Biology","project":[{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"}],"citation":{"chicago":"Christodoulou, Georgia, Tim P Vogels, and Everton J. Agnes. “Regimes and Mechanisms of Transient Amplification in Abstract and Biological Neural Networks.” <i>PLoS Computational Biology</i>. Public Library of Science, 2022. <a href=\"https://doi.org/10.1371/journal.pcbi.1010365\">https://doi.org/10.1371/journal.pcbi.1010365</a>.","ieee":"G. Christodoulou, T. P. Vogels, and E. J. 
Agnes, “Regimes and mechanisms of transient amplification in abstract and biological neural networks,” <i>PLoS Computational Biology</i>, vol. 18, no. 8. Public Library of Science, 2022.","ista":"Christodoulou G, Vogels TP, Agnes EJ. 2022. Regimes and mechanisms of transient amplification in abstract and biological neural networks. PLoS Computational Biology. 18(8), e1010365.","short":"G. Christodoulou, T.P. Vogels, E.J. Agnes, PLoS Computational Biology 18 (2022).","mla":"Christodoulou, Georgia, et al. “Regimes and Mechanisms of Transient Amplification in Abstract and Biological Neural Networks.” <i>PLoS Computational Biology</i>, vol. 18, no. 8, e1010365, Public Library of Science, 2022, doi:<a href=\"https://doi.org/10.1371/journal.pcbi.1010365\">10.1371/journal.pcbi.1010365</a>.","apa":"Christodoulou, G., Vogels, T. P., &#38; Agnes, E. J. (2022). Regimes and mechanisms of transient amplification in abstract and biological neural networks. <i>PLoS Computational Biology</i>. Public Library of Science. <a href=\"https://doi.org/10.1371/journal.pcbi.1010365\">https://doi.org/10.1371/journal.pcbi.1010365</a>","ama":"Christodoulou G, Vogels TP, Agnes EJ. Regimes and mechanisms of transient amplification in abstract and biological neural networks. <i>PLoS Computational Biology</i>. 2022;18(8). doi:<a href=\"https://doi.org/10.1371/journal.pcbi.1010365\">10.1371/journal.pcbi.1010365</a>"},"has_accepted_license":"1","issue":"8","scopus_import":"1","abstract":[{"lang":"eng","text":"Neuronal networks encode information through patterns of activity that define the networks’ function. The neurons’ activity relies on specific connectivity structures, yet the link between structure and function is not fully understood. Here, we tackle this structure-function problem with a new conceptual approach. 
Instead of manipulating the connectivity directly, we focus on upper triangular matrices, which represent the network dynamics in a given orthonormal basis obtained by the Schur decomposition. This abstraction allows us to independently manipulate the eigenspectrum and feedforward structures of a connectivity matrix. Using this method, we describe a diverse repertoire of non-normal transient amplification, and to complement the analysis of the dynamical regimes, we quantify the geometry of output trajectories through the effective rank of both the eigenvector and the dynamics matrices. Counter-intuitively, we find that shrinking the eigenspectrum’s imaginary distribution leads to highly amplifying regimes in linear and long-lasting dynamics in nonlinear networks. We also find a trade-off between amplification and dimensionality of neuronal dynamics, i.e., trajectories in neuronal state-space. Networks that can amplify a large number of orthogonal initial conditions produce neuronal trajectories that lie in the same subspace of the neuronal state-space. Finally, we examine networks of excitatory and inhibitory neurons. We find that the strength of global inhibition is directly linked with the amplitude of amplification, such that weakening inhibitory weights also decreases amplification, and that the eigenspectrum’s imaginary distribution grows with an increase in the ratio between excitatory-to-inhibitory and excitatory-to-excitatory connectivity strengths. Consequently, the strength of global inhibition reveals itself as a strong signature for amplification and a potential control mechanism to switch dynamical regimes. 
Our results shed a light on how biological networks, i.e., networks constrained by Dale’s law, may be optimised for specific dynamical regimes."}],"publication_status":"published","oa_version":"Published Version","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"day":"15","date_updated":"2023-08-03T14:06:29Z","status":"public"},{"year":"2021","publisher":"Neural Information Processing Systems Foundation","language":[{"iso":"eng"}],"type":"conference","_id":"11453","oa":1,"author":[{"full_name":"Braun, Lukas","first_name":"Lukas","last_name":"Braun"},{"first_name":"Tim P","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels"}],"date_published":"2021-12-01T00:00:00Z","department":[{"_id":"TiVo"}],"acknowledgement":"We would like to thank Professor Dr. Henning Sprekeler for his valuable suggestions and Dr. Andrew Saxe, Milan Klöwer and Anna Wallis for their constructive feedback on the manuscript. Lukas Braun was supported by the Network of European Neuroscience Schools through their NENS Exchange Grant program, by the European Union through their European Community Action Scheme for the Mobility of University Students, the Woodward Scholarship awarded by Wadham College, Oxford and the Medical Research Council [MR/N013468/1]. Tim P. 
Vogels was supported by a Wellcome Trust Senior Research Fellowship [214316/Z/18/Z].","date_created":"2022-06-19T22:01:59Z","intvolume":"        20","conference":{"end_date":"2021-12-14","start_date":"2021-12-06","name":"NeurIPS: Neural Information Processing Systems","location":"Virtual, Online"},"article_processing_charge":"No","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","publication_identifier":{"isbn":["9781713845393"],"issn":["1049-5258"]},"quality_controlled":"1","month":"12","volume":20,"date_updated":"2022-06-20T07:12:58Z","status":"public","abstract":[{"text":"Neuronal computations depend on synaptic connectivity and intrinsic electrophysiological properties. Synaptic connectivity determines which inputs from presynaptic neurons are integrated, while cellular properties determine how inputs are filtered over time. Unlike their biological counterparts, most computational approaches to learning in simulated neural networks are limited to changes in synaptic connectivity. However, if intrinsic parameters change, neural computations are altered drastically. Here, we include the parameters that determine the intrinsic properties,\r\ne.g., time constants and reset potential, into the learning paradigm. Using sparse feedback signals that indicate target spike times, and gradient-based parameter updates, we show that the intrinsic parameters can be learned along with the synaptic weights to produce specific input-output functions. Specifically, we use a teacher-student paradigm in which a randomly initialised leaky integrate-and-fire or resonate-and-fire neuron must recover the parameters of a teacher neuron. We show that complex temporal functions can be learned online and without backpropagation through time, relying on event-based updates only. 
Our results are a step towards online learning of neural computations from ungraded and unsigned sparse feedback signals with a biologically inspired learning mechanism.","lang":"eng"}],"day":"01","publication_status":"published","page":"16437-16450","oa_version":"Published Version","citation":{"apa":"Braun, L., &#38; Vogels, T. P. (2021). Online learning of neural computations from sparse temporal feedback. In <i>Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems</i> (Vol. 20, pp. 16437–16450). Virtual, Online: Neural Information Processing Systems Foundation.","short":"L. Braun, T.P. Vogels, in:, Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2021, pp. 16437–16450.","mla":"Braun, Lukas, and Tim P. Vogels. “Online Learning of Neural Computations from Sparse Temporal Feedback.” <i>Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems</i>, vol. 20, Neural Information Processing Systems Foundation, 2021, pp. 16437–50.","ista":"Braun L, Vogels TP. 2021. Online learning of neural computations from sparse temporal feedback. Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 20, 16437–16450.","ama":"Braun L, Vogels TP. Online learning of neural computations from sparse temporal feedback. In: <i>Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems</i>. Vol 20. Neural Information Processing Systems Foundation; 2021:16437-16450.","chicago":"Braun, Lukas, and Tim P Vogels. “Online Learning of Neural Computations from Sparse Temporal Feedback.” In <i>Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems</i>, 20:16437–50. 
Neural Information Processing Systems Foundation, 2021.","ieee":"L. Braun and T. P. Vogels, “Online learning of neural computations from sparse temporal feedback,” in <i>Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems</i>, Virtual, Online, 2021, vol. 20, pp. 16437–16450."},"project":[{"name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","grant_number":"214316/Z/18/Z"}],"scopus_import":"1","main_file_link":[{"open_access":"1","url":"https://proceedings.neurips.cc/paper/2021/file/88e1ce84f9feef5a08d0df0334c53468-Paper.pdf"}],"title":"Online learning of neural computations from sparse temporal feedback","publication":"Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems"},{"day":"01","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"publication_status":"published","page":"899-925","oa_version":"Published Version","abstract":[{"lang":"eng","text":"Brains process information in spiking neural networks. Their intricate connections shape the diverse functions these networks perform. In comparison, the functional capabilities of models of spiking networks are still rudimentary. This shortcoming is mainly due to the lack of insight and practical algorithms to construct the necessary connectivity. Any such algorithm typically attempts to build networks by iteratively reducing the error compared to a desired output. But assigning credit to hidden units in multi-layered spiking networks has remained challenging due to the non-differentiable nonlinearity of spikes. To avoid this issue, one can employ surrogate gradients to discover the required connectivity in spiking network models. 
However, the choice of a surrogate is not unique, raising the question of how its implementation influences the effectiveness of the method. Here, we use numerical simulations to systematically study how essential design parameters of surrogate gradients impact learning performance on a range of classification problems. We show that surrogate gradient learning is robust to different shapes of underlying surrogate derivatives, but the choice of the derivative’s scale can substantially affect learning performance. When we combine surrogate gradients with a suitable activity regularization technique, robust information processing can be achieved in spiking networks even at the sparse activity limit. Our study provides a systematic account of the remarkable robustness of surrogate gradient learning and serves as a practical guide to model functional spiking neural networks."}],"ec_funded":1,"status":"public","date_updated":"2023-08-04T10:53:14Z","publication":"Neural Computation","title":"The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks","file":[{"date_created":"2022-04-08T06:05:39Z","file_id":"11131","checksum":"eac5a51c24c8989ae7cf9ae32ec3bc95","success":1,"date_updated":"2022-04-08T06:05:39Z","file_name":"2021_NeuralComputation_Zenke.pdf","file_size":1611614,"access_level":"open_access","content_type":"application/pdf","relation":"main_file","creator":"dernst"}],"external_id":{"isi":["000663433900003"],"pmid":["33513328"]},"scopus_import":"1","issue":"4","has_accepted_license":"1","citation":{"ieee":"F. Zenke and T. P. Vogels, “The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks,” <i>Neural Computation</i>, vol. 33, no. 4. MIT Press, pp. 899–925, 2021.","chicago":"Zenke, Friedemann, and Tim P Vogels. 
“The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>. MIT Press, 2021. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>.","ama":"Zenke F, Vogels TP. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. 2021;33(4):899-925. doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>","apa":"Zenke, F., &#38; Vogels, T. P. (2021). The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. MIT Press. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>","mla":"Zenke, Friedemann, and Tim P. Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>, vol. 33, no. 4, MIT Press, 2021, pp. 899–925, doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>.","ista":"Zenke F, Vogels TP. 2021. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. Neural Computation. 33(4), 899–925.","short":"F. Zenke, T.P. Vogels, Neural Computation 33 (2021) 899–925."},"project":[{"grant_number":"819603","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"},{"grant_number":"214316/Z/18/Z","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? 
Spatiotemporal dynamics in strongly coupled recurrent neuronal networks."}],"date_published":"2021-03-01T00:00:00Z","author":[{"first_name":"Friedemann","full_name":"Zenke, Friedemann","last_name":"Zenke","orcid":"0000-0003-1883-644X"},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","first_name":"Tim P"}],"_id":"8253","oa":1,"type":"journal_article","doi":"10.1162/neco_a_01367","language":[{"iso":"eng"}],"year":"2021","publisher":"MIT Press","isi":1,"article_type":"original","volume":33,"month":"03","publication_identifier":{"issn":["0899-7667"],"eissn":["1530-888X"]},"quality_controlled":"1","file_date_updated":"2022-04-08T06:05:39Z","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","article_processing_charge":"No","ddc":["000","570"],"intvolume":"        33","date_created":"2020-08-12T12:08:24Z","department":[{"_id":"TiVo"}],"acknowledgement":"F.Z. was supported by the Wellcome Trust (110124/Z/15/Z) and the Novartis Research Foundation. T.P.V. was supported by a Wellcome Trust Sir Henry Dale Research fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant SYNAPSEEK.","pmid":1},{"date_updated":"2023-10-18T09:20:55Z","status":"public","abstract":[{"lang":"eng","text":"The search for biologically faithful synaptic plasticity rules has resulted in a large body of models. They are usually inspired by – and fitted to – experimental data, but they rarely produce neural dynamics that serve complex functions. These failures suggest that current plasticity models are still under-constrained by existing data. Here, we present an alternative approach that uses meta-learning to discover plausible synaptic plasticity rules. Instead of experimental data, the rules are constrained by the functions they implement and the structure they are meant to produce. 
Briefly, we parameterize synaptic plasticity rules by a Volterra expansion and then use supervised learning methods (gradient descent or evolutionary strategies) to minimize a problem-dependent loss function that quantifies how effectively a candidate plasticity rule transforms an initially random network into one with the desired function. We first validate our approach by re-discovering previously described plasticity rules, starting at the single-neuron level and “Oja’s rule”, a simple Hebbian plasticity rule that captures the direction of most variability of inputs to a neuron (i.e., the first principal component). We expand the problem to the network level and ask the framework to find Oja’s rule together with an anti-Hebbian rule such that an initially random two-layer firing-rate network will recover several principal components of the input space after learning. Next, we move to networks of integrate-and-fire neurons with plastic inhibitory afferents. We train for rules that achieve a target firing rate by countering tuned excitation. Our algorithm discovers a specific subset of the manifold of rules that can solve this task. Our work is a proof of principle of an automated and unbiased approach to unveil synaptic plasticity rules that obey biological constraints and can solve complex functions."}],"ec_funded":1,"day":"06","publication_status":"published","oa_version":"Published Version","page":"16398-16408","citation":{"ama":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In: <i>Advances in Neural Information Processing Systems</i>. Vol 33. ; 2020:16398-16408.","apa":"Confavreux, B. J., Zenke, F., Agnes, E. J., Lillicrap, T., &#38; Vogels, T. P. (2020). A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In <i>Advances in Neural Information Processing Systems</i> (Vol. 33, pp. 
16398–16408). Vancouver, Canada.","mla":"Confavreux, Basile J., et al. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” <i>Advances in Neural Information Processing Systems</i>, vol. 33, 2020, pp. 16398–408.","ista":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. 2020. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. Advances in Neural Information Processing Systems. NeurIPS: Conference on Neural Information Processing Systems vol. 33, 16398–16408.","short":"B.J. Confavreux, F. Zenke, E.J. Agnes, T. Lillicrap, T.P. Vogels, in:, Advances in Neural Information Processing Systems, 2020, pp. 16398–16408.","ieee":"B. J. Confavreux, F. Zenke, E. J. Agnes, T. Lillicrap, and T. P. Vogels, “A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network,” in <i>Advances in Neural Information Processing Systems</i>, Vancouver, Canada, 2020, vol. 33, pp. 16398–16408.","chicago":"Confavreux, Basile J, Friedemann Zenke, Everton J. Agnes, Timothy Lillicrap, and Tim P Vogels. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” In <i>Advances in Neural Information Processing Systems</i>, 33:16398–408, 2020."},"project":[{"name":"What’s in a memory? 
Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","grant_number":"214316/Z/18/Z"},{"name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","grant_number":"819603"}],"scopus_import":"1","main_file_link":[{"open_access":"1","url":"https://proceedings.neurips.cc/paper/2020/hash/bdbd5ebfde4934142c8a88e7a3796cd5-Abstract.html"}],"title":"A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network","related_material":{"link":[{"url":"https://doi.org/10.1101/2020.10.24.353409","relation":"is_continued_by"}],"record":[{"relation":"dissertation_contains","status":"public","id":"14422"}]},"publication":"Advances in Neural Information Processing Systems","year":"2020","language":[{"iso":"eng"}],"type":"conference","_id":"9633","oa":1,"author":[{"last_name":"Confavreux","id":"C7610134-B532-11EA-BD9F-F5753DDC885E","full_name":"Confavreux, Basile J","first_name":"Basile J"},{"last_name":"Zenke","first_name":"Friedemann","full_name":"Zenke, Friedemann"},{"last_name":"Agnes","full_name":"Agnes, Everton J.","first_name":"Everton J."},{"last_name":"Lillicrap","full_name":"Lillicrap, Timothy","first_name":"Timothy"},{"full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","orcid":"0000-0003-3295-6181"}],"date_published":"2020-12-06T00:00:00Z","department":[{"_id":"TiVo"}],"acknowledgement":"We would like to thank Chaitanya Chintaluri, Georgia Christodoulou, Bill Podlaski and Merima Šabanovic for useful discussions and comments. 
This work was supported by a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), a BBSRC grant (BB/N019512/1), an ERC consolidator Grant (SYNAPSEEK), a Leverhulme Trust Project Grant (RPG-2016-446), and funding from École Polytechnique, Paris.","date_created":"2021-07-04T22:01:27Z","intvolume":"        33","conference":{"start_date":"2020-12-06","end_date":"2020-12-12","name":"NeurIPS: Conference on Neural Information Processing Systems","location":"Vancouver, Canada"},"user_id":"6785fbc1-c503-11eb-8a32-93094b40e1cf","article_processing_charge":"No","publication_identifier":{"issn":["1049-5258"]},"quality_controlled":"1","month":"12","volume":33}]
