[{"department":[{"_id":"TiVo"}],"date_created":"2024-01-21T23:00:56Z","month":"01","article_type":"original","date_published":"2024-01-16T00:00:00Z","publisher":"Proceedings of the National Academy of Sciences","scopus_import":"1","language":[{"iso":"eng"}],"issue":"3","publication":"Proceedings of the National Academy of Sciences of the United States of America","day":"16","type":"journal_article","intvolume":"       121","status":"public","article_number":"e2307776121","related_material":{"link":[{"url":"https://github.com/ChrisCurrin/pv-kcnc2","relation":"software"}]},"doi":"10.1073/pnas.2307776121","year":"2024","ec_funded":1,"external_id":{"pmid":["38194456"]},"title":"A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction","volume":121,"date_updated":"2024-01-23T10:20:40Z","article_processing_charge":"No","pmid":1,"_id":"14841","publication_identifier":{"eissn":["1091-6490"]},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","acknowledgement":"This work was supported by an ERC Consolidator Grant (SYNAPSEEK) to T.P.V., the NOMIS Foundation through the NOMIS Fellowships program at IST Austria to C.B.C., a Jefferson Synaptic Biology Center Pilot Project Grant to M.C., NIH NINDS U54 NS108874 (PI, Alfred L. George), and NIH NINDS R01 NS122887 to E.M.G. The computations were enabled by resources provided by the Swedish National Infrastructure for Computing (SNIC) at the PDC Center for High-Performance Computing, KTH Royal Institute of Technology, partially funded by the Swedish Research Council through grant agreement no. 2018-05973. We thank Akshay Sridhar for the fruitful discussion of the project.","quality_controlled":"1","project":[{"grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"oa_version":"None","publication_status":"published","citation":{"ista":"Clatot J, Currin C, Liang Q, Pipatpolkai T, Massey SL, Helbig I, Delemotte L, Vogels TP, Covarrubias M, Goldberg EM. 2024. A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. Proceedings of the National Academy of Sciences of the United States of America. 121(3), e2307776121.","short":"J. Clatot, C. Currin, Q. Liang, T. Pipatpolkai, S.L. Massey, I. Helbig, L. Delemotte, T.P. Vogels, M. Covarrubias, E.M. Goldberg, Proceedings of the National Academy of Sciences of the United States of America 121 (2024).","ama":"Clatot J, Currin C, Liang Q, et al. A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. 2024;121(3). doi:<a href=\"https://doi.org/10.1073/pnas.2307776121\">10.1073/pnas.2307776121</a>","mla":"Clatot, Jerome, et al. “A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 121, no. 3, e2307776121, Proceedings of the National Academy of Sciences, 2024, doi:<a href=\"https://doi.org/10.1073/pnas.2307776121\">10.1073/pnas.2307776121</a>.","chicago":"Clatot, Jerome, Christopher Currin, Qiansheng Liang, Tanadet Pipatpolkai, Shavonne L. Massey, Ingo Helbig, Lucie Delemotte, Tim P Vogels, Manuel Covarrubias, and Ethan M. Goldberg. 
“A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>. Proceedings of the National Academy of Sciences, 2024. <a href=\"https://doi.org/10.1073/pnas.2307776121\">https://doi.org/10.1073/pnas.2307776121</a>.","ieee":"J. Clatot <i>et al.</i>, “A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction,” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 121, no. 3. Proceedings of the National Academy of Sciences, 2024.","apa":"Clatot, J., Currin, C., Liang, Q., Pipatpolkai, T., Massey, S. L., Helbig, I., … Goldberg, E. M. (2024). A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. Proceedings of the National Academy of Sciences. <a href=\"https://doi.org/10.1073/pnas.2307776121\">https://doi.org/10.1073/pnas.2307776121</a>"},"abstract":[{"text":"De novo heterozygous variants in KCNC2 encoding the voltage-gated potassium (K+) channel subunit Kv3.2 are a recently described cause of developmental and epileptic encephalopathy (DEE). A de novo variant in KCNC2 c.374G > A (p.Cys125Tyr) was identified via exome sequencing in a patient with DEE. Relative to wild-type Kv3.2, Kv3.2-p.Cys125Tyr induces K+ currents exhibiting a large hyperpolarizing shift in the voltage dependence of activation, accelerated activation, and delayed deactivation consistent with a relative stabilization of the open conformation, along with increased current density. 
Leveraging the cryogenic electron microscopy (cryo-EM) structure of Kv3.1, molecular dynamic simulations suggest that a strong π-π stacking interaction between the variant Tyr125 and Tyr156 in the α-6 helix of the T1 domain promotes a relative stabilization of the open conformation of the channel, which underlies the observed gain of function. A multicompartment computational model of a Kv3-expressing parvalbumin-positive cerebral cortex fast-spiking γ-aminobutyric acidergic (GABAergic) interneuron (PV-IN) demonstrates how the Kv3.2-Cys125Tyr variant impairs neuronal excitability and dysregulates inhibition in cerebral cortex circuits to explain the resulting epilepsy.","lang":"eng"}],"author":[{"first_name":"Jerome","full_name":"Clatot, Jerome","last_name":"Clatot"},{"first_name":"Christopher","orcid":"0000-0002-4809-5059","last_name":"Currin","full_name":"Currin, Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9"},{"first_name":"Qiansheng","full_name":"Liang, Qiansheng","last_name":"Liang"},{"last_name":"Pipatpolkai","full_name":"Pipatpolkai, Tanadet","first_name":"Tanadet"},{"last_name":"Massey","full_name":"Massey, Shavonne L.","first_name":"Shavonne L."},{"full_name":"Helbig, Ingo","last_name":"Helbig","first_name":"Ingo"},{"full_name":"Delemotte, Lucie","last_name":"Delemotte","first_name":"Lucie"},{"first_name":"Tim P","full_name":"Vogels, Tim P","last_name":"Vogels","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"},{"last_name":"Covarrubias","full_name":"Covarrubias, Manuel","first_name":"Manuel"},{"full_name":"Goldberg, Ethan M.","last_name":"Goldberg","first_name":"Ethan M."}]},{"citation":{"short":"B.J. Confavreux, Synapseek: Meta-Learning Synaptic Plasticity Rules, Institute of Science and Technology Austria, 2023.","ista":"Confavreux BJ. 2023. Synapseek: Meta-learning synaptic plasticity rules. Institute of Science and Technology Austria.","mla":"Confavreux, Basile J. 
<i>Synapseek: Meta-Learning Synaptic Plasticity Rules</i>. Institute of Science and Technology Austria, 2023, doi:<a href=\"https://doi.org/10.15479/at:ista:14422\">10.15479/at:ista:14422</a>.","ama":"Confavreux BJ. Synapseek: Meta-learning synaptic plasticity rules. 2023. doi:<a href=\"https://doi.org/10.15479/at:ista:14422\">10.15479/at:ista:14422</a>","chicago":"Confavreux, Basile J. “Synapseek: Meta-Learning Synaptic Plasticity Rules.” Institute of Science and Technology Austria, 2023. <a href=\"https://doi.org/10.15479/at:ista:14422\">https://doi.org/10.15479/at:ista:14422</a>.","ieee":"B. J. Confavreux, “Synapseek: Meta-learning synaptic plasticity rules,” Institute of Science and Technology Austria, 2023.","apa":"Confavreux, B. J. (2023). <i>Synapseek: Meta-learning synaptic plasticity rules</i>. Institute of Science and Technology Austria. <a href=\"https://doi.org/10.15479/at:ista:14422\">https://doi.org/10.15479/at:ista:14422</a>"},"publication_status":"published","author":[{"id":"C7610134-B532-11EA-BD9F-F5753DDC885E","first_name":"Basile J","last_name":"Confavreux","full_name":"Confavreux, Basile J"}],"abstract":[{"text":"Animals exhibit a remarkable ability to learn and remember new behaviors, skills, and associations throughout their lifetime. These capabilities are made possible thanks to a variety of\r\nchanges in the brain throughout adulthood, regrouped under the term \"plasticity\". Some cells\r\nin the brain —neurons— and specifically changes in the connections between neurons, the\r\nsynapses, were shown to be crucial for the formation, selection, and consolidation of memories\r\nfrom past experiences. These ongoing changes of synapses across time are called synaptic\r\nplasticity. 
Understanding how a myriad of biochemical processes operating at individual\r\nsynapses can somehow work in concert to give rise to meaningful changes in behavior is a\r\nfascinating problem and an active area of research.\r\nHowever, the experimental search for the precise plasticity mechanisms at play in the brain\r\nis daunting, as it is difficult to control and observe synapses during learning. Theoretical\r\napproaches have thus been the default method to probe the plasticity-behavior connection. Such\r\nstudies attempt to extract unifying principles across synapses and model all observed synaptic\r\nchanges using plasticity rules: equations that govern the evolution of synaptic strengths across\r\ntime in neuronal network models. These rules can use many relevant quantities to determine\r\nthe magnitude of synaptic changes, such as the precise timings of pre- and postsynaptic\r\naction potentials, the recent neuronal activity levels, the state of neighboring synapses, etc.\r\nHowever, analytical studies rely heavily on human intuition and are forced to make simplifying\r\nassumptions about plasticity rules.\r\nIn this thesis, we aim to assist and augment human intuition in this search for plasticity rules.\r\nWe explore whether a numerical approach could automatically discover the plasticity rules\r\nthat elicit desired behaviors in large networks of interconnected neurons. This approach is\r\ndubbed meta-learning synaptic plasticity: learning plasticity rules which themselves will make\r\nneuronal networks learn how to solve a desired task. We first write all the potential plasticity\r\nmechanisms to consider using a single expression with adjustable parameters. 
We then optimize\r\nthese plasticity parameters using evolutionary strategies or Bayesian inference on tasks known\r\nto involve synaptic plasticity, such as familiarity detection and network stabilization.\r\nWe show that these automated approaches are powerful tools, able to complement established\r\nanalytical methods. By comprehensively screening plasticity rules at all synapse types in\r\nrealistic, spiking neuronal network models, we discover entire sets of degenerate plausible\r\nplasticity rules that reliably elicit memory-related behaviors. Our approaches allow for more\r\nrobust experimental predictions, by abstracting out the idiosyncrasies of individual plasticity\r\nrules, and provide fresh insights on synaptic plasticity in spiking network models.\r\n","lang":"eng"}],"article_processing_charge":"No","date_updated":"2023-10-18T09:20:56Z","project":[{"grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"oa_version":"Published Version","user_id":"8b945eb4-e2f2-11eb-945a-df72226e66a9","publication_identifier":{"issn":["2663-337X"]},"_id":"14422","ec_funded":1,"doi":"10.15479/at:ista:14422","year":"2023","title":"Synapseek: Meta-learning synaptic plasticity rules","tmp":{"short":"CC BY-NC-SA (4.0)","name":"Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)","image":"/images/cc_by_nc_sa.png","legal_code_url":"https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode"},"alternative_title":["ISTA Thesis"],"ddc":["610"],"related_material":{"record":[{"relation":"part_of_dissertation","id":"9633","status":"public"}]},"type":"dissertation","day":"12","supervisor":[{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","full_name":"Vogels, Tim P"}],"status":"public","page":"148","file_date_updated":"2023-10-18T07:56:08Z","date_published":"2023-10-12T00:00:00Z","month":"10","language":[{"iso":"eng"}],"publisher":"Institute of Science and Technology Austria","department":[{"_id":"GradSch"},{"_id":"TiVo"}],"degree_awarded":"PhD","has_accepted_license":"1","file":[{"embargo_to":"open_access","date_updated":"2023-10-12T14:54:52Z","access_level":"closed","file_name":"Confavreux_Thesis_2A.pdf","file_size":30599717,"embargo":"2024-10-12","date_created":"2023-10-12T14:53:50Z","checksum":"7f636555eae7803323df287672fd13ed","content_type":"application/pdf","relation":"main_file","creator":"cchlebak","file_id":"14424"},{"access_level":"closed","date_updated":"2023-10-18T07:56:08Z","checksum":"725e85946db92290a4583a0de9779e1b","date_created":"2023-10-18T07:38:34Z","file_size":68406739,"file_name":"Confavreux Thesis.zip","creator":"cchlebak","file_id":"14440","relation":"source_file","content_type":"application/x-zip-compressed"}],"date_created":"2023-10-12T14:13:25Z"},{"isi":1,"article_number":"110580","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode","image":"/images/cc_by_nc_nd.png","name":"Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)","short":"CC BY-NC-ND (4.0)"},"ddc":["570"],"ec_funded":1,"year":"2022","doi":"10.1016/j.celrep.2022.110580","external_id":{"isi":["000779794000001"]},"title":"Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome","article_processing_charge":"No","oa":1,"date_updated":"2023-08-03T06:32:55Z","volume":38,"quality_controlled":"1","project":[{"_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","grant_number":"819603"},{"name":"NOMIS Fellowship 
Program","_id":"9B861AAC-BA93-11EA-9121-9846C619BF3A"}],"oa_version":"Published Version","acknowledgement":"We would like to thank Bernardo Rudy, Joanna Mattis, and Laura Mcgarry for comments on a previous version of the manuscript; Xiaohong Zhang for expert technical support and mouse colony maintenance; Melody Cheng for assistance with generation of the graphical abstract; and Jennifer Kearney for the gift of Scn1a+/− mice. This work was supported by the National Institute of Neurological Disorders and Stroke of the National Institutes of Health under F31NS111803 (to K.M.G.) and K08NS097633 and R01NS110869 (to E.M.G.), the Dravet Syndrome Foundation (to A.S.), an ERC Consolidator Grant (SYNAPSEEK) (to T.P.V.), and the NOMIS Foundation through the NOMIS Fellowships program at IST Austria (to C.C.). The graphical abstract was prepared using BioRender software (BioRender.com).","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","publication_identifier":{"eissn":["2211-1247"]},"_id":"11143","citation":{"mla":"Kaneko, Keisuke, et al. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” <i>Cell Reports</i>, vol. 38, no. 13, 110580, Elsevier, 2022, doi:<a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">10.1016/j.celrep.2022.110580</a>.","ama":"Kaneko K, Currin C, Goff KM, et al. Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. <i>Cell Reports</i>. 2022;38(13). doi:<a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">10.1016/j.celrep.2022.110580</a>","ista":"Kaneko K, Currin C, Goff KM, Wengert ER, Somarowthu A, Vogels TP, Goldberg EM. 2022. Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. Cell Reports. 38(13), 110580.","short":"K. Kaneko, C. Currin, K.M. Goff, E.R. Wengert, A. Somarowthu, T.P. Vogels, E.M. 
Goldberg, Cell Reports 38 (2022).","apa":"Kaneko, K., Currin, C., Goff, K. M., Wengert, E. R., Somarowthu, A., Vogels, T. P., &#38; Goldberg, E. M. (2022). Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. <i>Cell Reports</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">https://doi.org/10.1016/j.celrep.2022.110580</a>","ieee":"K. Kaneko <i>et al.</i>, “Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome,” <i>Cell Reports</i>, vol. 38, no. 13. Elsevier, 2022.","chicago":"Kaneko, Keisuke, Christopher Currin, Kevin M. Goff, Eric R. Wengert, Ala Somarowthu, Tim P Vogels, and Ethan M. Goldberg. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” <i>Cell Reports</i>. Elsevier, 2022. <a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">https://doi.org/10.1016/j.celrep.2022.110580</a>."},"publication_status":"published","author":[{"first_name":"Keisuke","full_name":"Kaneko, Keisuke","last_name":"Kaneko"},{"id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","first_name":"Christopher","orcid":"0000-0002-4809-5059","last_name":"Currin","full_name":"Currin, Christopher"},{"last_name":"Goff","full_name":"Goff, Kevin M.","first_name":"Kevin M."},{"first_name":"Eric R.","last_name":"Wengert","full_name":"Wengert, Eric R."},{"last_name":"Somarowthu","full_name":"Somarowthu, Ala","first_name":"Ala"},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","last_name":"Vogels","first_name":"Tim P"},{"first_name":"Ethan M.","full_name":"Goldberg, Ethan M.","last_name":"Goldberg"}],"abstract":[{"lang":"eng","text":"Dravet syndrome is a neurodevelopmental disorder characterized by epilepsy, intellectual disability, and sudden death due to pathogenic variants in SCN1A with loss of 
function of the sodium channel subunit Nav1.1. Nav1.1-expressing parvalbumin GABAergic interneurons (PV-INs) from young Scn1a+/− mice show impaired action potential generation. An approach assessing PV-IN function in the same mice at two time points shows impaired spike generation in all Scn1a+/− mice at postnatal days (P) 16–21, whether deceased prior or surviving to P35, with normalization by P35 in surviving mice. However, PV-IN synaptic transmission is dysfunctional in young Scn1a+/− mice that did not survive and in Scn1a+/− mice ≥ P35. Modeling confirms that PV-IN axonal propagation is more sensitive to decreased sodium conductance than spike generation. These results demonstrate dynamic dysfunction in Dravet syndrome: combined abnormalities of PV-IN spike generation and propagation drives early disease severity, while ongoing dysfunction of synaptic transmission contributes to chronic pathology."}],"department":[{"_id":"TiVo"}],"has_accepted_license":"1","date_created":"2022-04-10T22:01:39Z","file":[{"success":1,"content_type":"application/pdf","relation":"main_file","file_id":"11172","creator":"dernst","file_name":"2022_CellReports_Kaneko.pdf","file_size":4774216,"date_created":"2022-04-15T11:00:58Z","checksum":"49105c6c27c9af0f37f50a8bbb4d380d","date_updated":"2022-04-15T11:00:58Z","access_level":"open_access"}],"date_published":"2022-03-29T00:00:00Z","article_type":"original","month":"03","language":[{"iso":"eng"}],"scopus_import":"1","publisher":"Elsevier","file_date_updated":"2022-04-15T11:00:58Z","publication":"Cell Reports","issue":"13","type":"journal_article","day":"29","status":"public","intvolume":"        38"},{"year":"2022","ec_funded":1,"title":"Predictive learning enables neural networks to learn complex working memory tasks","ddc":["000"],"publication_status":"published","citation":{"short":"T.L. Van Der Plas, T.P. Vogels, S.G. Manohar, in:, Proceedings of Machine Learning Research, ML Research Press, 2022, pp. 
518–531.","ista":"Van Der Plas TL, Vogels TP, Manohar SG. 2022. Predictive learning enables neural networks to learn complex working memory tasks. Proceedings of Machine Learning Research. vol. 199, 518–531.","mla":"Van Der Plas, Thijs L., et al. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” <i>Proceedings of Machine Learning Research</i>, vol. 199, ML Research Press, 2022, pp. 518–31.","ama":"Van Der Plas TL, Vogels TP, Manohar SG. Predictive learning enables neural networks to learn complex working memory tasks. In: <i>Proceedings of Machine Learning Research</i>. Vol 199. ML Research Press; 2022:518-531.","chicago":"Van Der Plas, Thijs L., Tim P Vogels, and Sanjay G. Manohar. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” In <i>Proceedings of Machine Learning Research</i>, 199:518–31. ML Research Press, 2022.","apa":"Van Der Plas, T. L., Vogels, T. P., &#38; Manohar, S. G. (2022). Predictive learning enables neural networks to learn complex working memory tasks. In <i>Proceedings of Machine Learning Research</i> (Vol. 199, pp. 518–531). ML Research Press.","ieee":"T. L. Van Der Plas, T. P. Vogels, and S. G. Manohar, “Predictive learning enables neural networks to learn complex working memory tasks,” in <i>Proceedings of Machine Learning Research</i>, 2022, vol. 199, pp. 518–531."},"abstract":[{"lang":"eng","text":"Brains are thought to engage in predictive learning - learning to predict upcoming stimuli - to construct an internal model of their environment. This is especially notable for spatial navigation, as first described by Tolman’s latent learning tasks. However, predictive learning has also been observed in sensory cortex, in settings unrelated to spatial navigation. Apart from normative frameworks such as active inference or efficient coding, what could be the utility of learning to predict the patterns of occurrence of correlated stimuli? 
Here we show that prediction, and thereby the construction of an internal model of sequential stimuli, can bootstrap the learning process of a working memory task in a recurrent neural network. We implemented predictive learning alongside working memory match-tasks, and networks emerged to solve the prediction task first by encoding information across time to predict upcoming stimuli, and then eavesdropped on this solution to solve the matching task. Eavesdropping was most beneficial when neural resources were limited. Hence, predictive learning acts as a general neural mechanism to learn to store sensory information that can later be essential for working memory tasks."}],"author":[{"full_name":"Van Der Plas, Thijs L.","last_name":"Van Der Plas","first_name":"Thijs L."},{"full_name":"Vogels, Tim P","last_name":"Vogels","orcid":"0000-0003-3295-6181","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"},{"last_name":"Manohar","full_name":"Manohar, Sanjay G.","first_name":"Sanjay G."}],"date_updated":"2023-07-18T06:36:28Z","volume":199,"oa":1,"article_processing_charge":"No","_id":"13239","publication_identifier":{"eissn":["2640-3498"]},"acknowledgement":"The authors would like to thank members of the Vogels lab and Manohar lab, as well as Adam Packer, Andrew Saxe, Stefano Sarao Mannelli and Jacob Bakermans for fruitful discussions and comments on earlier versions of the manuscript.\r\nTLvdP was supported by funding from the Biotechnology and Biological Sciences Research Council (BBSRC) [grant number BB/M011224/1]. TPV was supported by an ERC Consolidator Grant (SYNAPSEEK). 
SGM was funded by a MRC Clinician Scientist Fellowship MR/P00878X and Leverhulme Grant RPG-2018-310.","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","project":[{"grant_number":"819603","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"quality_controlled":"1","oa_version":"Published Version","month":"12","date_published":"2022-12-01T00:00:00Z","publisher":"ML Research Press","scopus_import":"1","language":[{"iso":"eng"}],"has_accepted_license":"1","department":[{"_id":"TiVo"}],"file":[{"file_size":585135,"file_name":"2022_PMLR_vanderPlas.pdf","checksum":"7530a93ef42e10b4db1e5e4b69796e93","date_created":"2023-07-18T06:32:38Z","access_level":"open_access","date_updated":"2023-07-18T06:32:38Z","success":1,"relation":"main_file","content_type":"application/pdf","file_id":"13243","creator":"dernst"}],"date_created":"2023-07-16T22:01:12Z","day":"01","type":"conference","intvolume":"       199","status":"public","publication":"Proceedings of Machine Learning Research","file_date_updated":"2023-07-18T06:32:38Z","page":"518-531"},{"ddc":["570"],"article_number":"873","isi":1,"tmp":{"image":"/images/cc_by.png","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"title":"Developmental depression-to-facilitation shift controls excitation-inhibition balance","external_id":{"isi":["000844814800007"]},"ec_funded":1,"year":"2022","doi":"10.1038/s42003-022-03801-2","oa_version":"Published Version","quality_controlled":"1","project":[{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? 
Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"},{"grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"acknowledgement":"We would like to thank the Vogels Lab for feedback on an earlier version of this manuscript. D.W.J. was supported by a Marshall Scholarship and a Clarendon Scholarship. R.P.C. and T.P.V. were supported by a Wellcome Trust and Royal Society Sir Henry Dale Fellowship (WT 100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant (SYNAPSEEK).","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","publication_identifier":{"eissn":["2399-3642"]},"_id":"12009","article_processing_charge":"No","oa":1,"date_updated":"2023-08-03T13:22:42Z","volume":5,"author":[{"first_name":"David W.","last_name":"Jia","full_name":"Jia, David W."},{"first_name":"Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","full_name":"Vogels, Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"},{"full_name":"Costa, Rui Ponte","last_name":"Costa","first_name":"Rui Ponte"}],"abstract":[{"lang":"eng","text":"Changes in the short-term dynamics of excitatory synapses over development have been observed throughout cortex, but their purpose and consequences remain unclear. Here, we propose that developmental changes in synaptic dynamics buffer the effect of slow inhibitory long-term plasticity, allowing for continuously stable neural activity. Using computational modeling we demonstrate that early in development excitatory short-term depression quickly stabilises neural activity, even in the face of strong, unbalanced excitation. 
We introduce a model of the commonly observed developmental shift from depression to facilitation and show that neural activity remains stable throughout development, while inhibitory synaptic plasticity slowly balances excitation, consistent with experimental observations. Our model predicts changes in the input responses from phasic to phasic-and-tonic and more precise spike timings. We also observe a gradual emergence of short-lasting memory traces governed by short-term plasticity development. We conclude that the developmental depression-to-facilitation shift may control excitation-inhibition balance throughout development with important functional consequences."}],"citation":{"ieee":"D. W. Jia, T. P. Vogels, and R. P. Costa, “Developmental depression-to-facilitation shift controls excitation-inhibition balance,” <i>Communications biology</i>, vol. 5. Springer Nature, 2022.","apa":"Jia, D. W., Vogels, T. P., &#38; Costa, R. P. (2022). Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications Biology</i>. Springer Nature. <a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>","chicago":"Jia, David W., Tim P Vogels, and Rui Ponte Costa. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>. Springer Nature, 2022. <a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>.","ama":"Jia DW, Vogels TP, Costa RP. Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications biology</i>. 2022;5. doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>","mla":"Jia, David W., et al. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>, vol. 
5, 873, Springer Nature, 2022, doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>.","short":"D.W. Jia, T.P. Vogels, R.P. Costa, Communications Biology 5 (2022).","ista":"Jia DW, Vogels TP, Costa RP. 2022. Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications biology. 5, 873."},"publication_status":"published","file":[{"date_updated":"2022-09-05T08:55:11Z","access_level":"open_access","date_created":"2022-09-05T08:55:11Z","checksum":"3ec724c4f6d3440028c217305e32915f","file_name":"2022_CommBiology_Jia.pdf","file_size":2491191,"creator":"dernst","file_id":"12022","content_type":"application/pdf","relation":"main_file","success":1}],"date_created":"2022-09-04T22:02:02Z","department":[{"_id":"TiVo"}],"has_accepted_license":"1","language":[{"iso":"eng"}],"scopus_import":"1","publisher":"Springer Nature","date_published":"2022-08-25T00:00:00Z","article_type":"original","month":"08","file_date_updated":"2022-09-05T08:55:11Z","publication":"Communications biology","status":"public","intvolume":"         5","type":"journal_article","day":"25"},{"page":"899-925","file_date_updated":"2022-04-08T06:05:39Z","publication":"Neural Computation","issue":"4","status":"public","intvolume":"        33","type":"journal_article","day":"01","file":[{"success":1,"creator":"dernst","file_id":"11131","content_type":"application/pdf","relation":"main_file","date_created":"2022-04-08T06:05:39Z","checksum":"eac5a51c24c8989ae7cf9ae32ec3bc95","file_name":"2021_NeuralComputation_Zenke.pdf","file_size":1611614,"date_updated":"2022-04-08T06:05:39Z","access_level":"open_access"}],"date_created":"2020-08-12T12:08:24Z","department":[{"_id":"TiVo"}],"has_accepted_license":"1","language":[{"iso":"eng"}],"scopus_import":"1","publisher":"MIT Press","article_type":"original","date_published":"2021-03-01T00:00:00Z","month":"03","quality_controlled":"1","project":[{"_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","grant_number":"819603"},{"name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","grant_number":"214316/Z/18/Z"}],"oa_version":"Published Version","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","acknowledgement":"F.Z. was supported by the Wellcome Trust (110124/Z/15/Z) and the Novartis Research Foundation. T.P.V. was supported by a Wellcome Trust Sir Henry Dale Research fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant SYNAPSEEK.","publication_identifier":{"eissn":["1530-888X"],"issn":["0899-7667"]},"_id":"8253","pmid":1,"article_processing_charge":"No","date_updated":"2023-08-04T10:53:14Z","volume":33,"oa":1,"author":[{"last_name":"Zenke","full_name":"Zenke, Friedemann","orcid":"0000-0003-1883-644X","first_name":"Friedemann"},{"orcid":"0000-0003-3295-6181","last_name":"Vogels","full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"abstract":[{"text":"Brains process information in spiking neural networks. Their intricate connections shape the diverse functions these networks perform. In comparison, the functional capabilities of models of spiking networks are still rudimentary. This shortcoming is mainly due to the lack of insight and practical algorithms to construct the necessary connectivity. Any such algorithm typically attempts to build networks by iteratively reducing the error compared to a desired output. 
But assigning credit to hidden units in multi-layered spiking networks has remained challenging due to the non-differentiable nonlinearity of spikes. To avoid this issue, one can employ surrogate gradients to discover the required connectivity in spiking network models. However, the choice of a surrogate is not unique, raising the question of how its implementation influences the effectiveness of the method. Here, we use numerical simulations to systematically study how essential design parameters of surrogate gradients impact learning performance on a range of classification problems. We show that surrogate gradient learning is robust to different shapes of underlying surrogate derivatives, but the choice of the derivative’s scale can substantially affect learning performance. When we combine surrogate gradients with a suitable activity regularization technique, robust information processing can be achieved in spiking networks even at the sparse activity limit. Our study provides a systematic account of the remarkable robustness of surrogate gradient learning and serves as a practical guide to model functional spiking neural networks.","lang":"eng"}],"citation":{"mla":"Zenke, Friedemann, and Tim P. Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>, vol. 33, no. 4, MIT Press, 2021, pp. 899–925, doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>.","ama":"Zenke F, Vogels TP. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. 2021;33(4):899-925. doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>","short":"F. Zenke, T.P. Vogels, Neural Computation 33 (2021) 899–925.","ista":"Zenke F, Vogels TP. 2021. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. 
Neural Computation. 33(4), 899–925.","ieee":"F. Zenke and T. P. Vogels, “The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks,” <i>Neural Computation</i>, vol. 33, no. 4. MIT Press, pp. 899–925, 2021.","apa":"Zenke, F., &#38; Vogels, T. P. (2021). The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. MIT Press. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>","chicago":"Zenke, Friedemann, and Tim P Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>. MIT Press, 2021. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>."},"publication_status":"published","ddc":["000","570"],"isi":1,"tmp":{"image":"/images/cc_by.png","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"external_id":{"pmid":["33513328"],"isi":["000663433900003"]},"title":"The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks","ec_funded":1,"year":"2021","doi":"10.1162/neco_a_01367"},{"month":"09","article_type":"original","date_published":"2020-09-17T00:00:00Z","scopus_import":"1","publisher":"eLife Sciences 
Publications","language":[{"iso":"eng"}],"has_accepted_license":"1","department":[{"_id":"TiVo"}],"file":[{"relation":"main_file","content_type":"application/pdf","creator":"cziletti","file_id":"8709","success":1,"access_level":"open_access","date_updated":"2020-10-27T11:37:32Z","file_size":17355867,"file_name":"2020_eLife_Gonçalves.pdf","checksum":"c4300ddcd93ed03fc9c6cdf1f77890be","date_created":"2020-10-27T11:37:32Z"}],"date_created":"2020-07-16T12:26:04Z","day":"17","type":"journal_article","intvolume":"         9","status":"public","publication":"eLife","file_date_updated":"2020-10-27T11:37:32Z","year":"2020","doi":"10.7554/eLife.56261","ec_funded":1,"external_id":{"pmid":["32940606"],"isi":["000584989400001"]},"title":"Training deep neural density estimators to identify mechanistic models of neural dynamics","tmp":{"image":"/images/cc_by.png","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"isi":1,"article_number":"e56261","ddc":["570"],"citation":{"chicago":"Gonçalves, Pedro J., Jan-Matthis Lueckmann, Michael Deistler, Marcel Nonnenmacher, Kaan Öcal, Giacomo Bassetto, Chaitanya Chintaluri, et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” <i>ELife</i>. eLife Sciences Publications, 2020. <a href=\"https://doi.org/10.7554/eLife.56261\">https://doi.org/10.7554/eLife.56261</a>.","ieee":"P. J. Gonçalves <i>et al.</i>, “Training deep neural density estimators to identify mechanistic models of neural dynamics,” <i>eLife</i>, vol. 9. eLife Sciences Publications, 2020.","apa":"Gonçalves, P. J., Lueckmann, J.-M., Deistler, M., Nonnenmacher, M., Öcal, K., Bassetto, G., … Macke, J. H. (2020). Training deep neural density estimators to identify mechanistic models of neural dynamics. <i>ELife</i>. eLife Sciences Publications. 
<a href=\"https://doi.org/10.7554/eLife.56261\">https://doi.org/10.7554/eLife.56261</a>","short":"P.J. Gonçalves, J.-M. Lueckmann, M. Deistler, M. Nonnenmacher, K. Öcal, G. Bassetto, C. Chintaluri, W.F. Podlaski, S.A. Haddad, T.P. Vogels, D.S. Greenberg, J.H. Macke, ELife 9 (2020).","ista":"Gonçalves PJ, Lueckmann J-M, Deistler M, Nonnenmacher M, Öcal K, Bassetto G, Chintaluri C, Podlaski WF, Haddad SA, Vogels TP, Greenberg DS, Macke JH. 2020. Training deep neural density estimators to identify mechanistic models of neural dynamics. eLife. 9, e56261.","mla":"Gonçalves, Pedro J., et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” <i>ELife</i>, vol. 9, e56261, eLife Sciences Publications, 2020, doi:<a href=\"https://doi.org/10.7554/eLife.56261\">10.7554/eLife.56261</a>.","ama":"Gonçalves PJ, Lueckmann J-M, Deistler M, et al. Training deep neural density estimators to identify mechanistic models of neural dynamics. <i>eLife</i>. 2020;9. doi:<a href=\"https://doi.org/10.7554/eLife.56261\">10.7554/eLife.56261</a>"},"publication_status":"published","abstract":[{"lang":"eng","text":"Mechanistic modeling in neuroscience aims to explain observed phenomena in terms of underlying causes. However, determining which model parameters agree with complex and stochastic neural data presents a significant challenge. We address this challenge with a machine learning tool which uses deep neural density estimators—trained using model simulations—to carry out Bayesian inference and retrieve the full space of parameters compatible with raw data or selected data features. Our method is scalable in parameters and data features and can rapidly analyze new data after initial training. We demonstrate the power and flexibility of our approach on receptive fields, ion channels, and Hodgkin–Huxley models. 
We also characterize the space of circuit configurations giving rise to rhythmic activity in the crustacean stomatogastric ganglion, and use these results to derive hypotheses for underlying compensation mechanisms. Our approach will help close the gap between data-driven and theory-driven models of neural dynamics."}],"author":[{"first_name":"Pedro J.","orcid":"0000-0002-6987-4836","full_name":"Gonçalves, Pedro J.","last_name":"Gonçalves"},{"last_name":"Lueckmann","full_name":"Lueckmann, Jan-Matthis","orcid":"0000-0003-4320-4663","first_name":"Jan-Matthis"},{"first_name":"Michael","full_name":"Deistler, Michael","last_name":"Deistler","orcid":"0000-0002-3573-0404"},{"orcid":"0000-0001-6044-6627","full_name":"Nonnenmacher, Marcel","last_name":"Nonnenmacher","first_name":"Marcel"},{"first_name":"Kaan","orcid":"0000-0002-8528-6858","full_name":"Öcal, Kaan","last_name":"Öcal"},{"first_name":"Giacomo","full_name":"Bassetto, Giacomo","last_name":"Bassetto"},{"id":"BA06AFEE-A4BA-11EA-AE5C-14673DDC885E","first_name":"Chaitanya","orcid":"0000-0003-4252-1608","full_name":"Chintaluri, Chaitanya","last_name":"Chintaluri"},{"first_name":"William F.","last_name":"Podlaski","full_name":"Podlaski, William F.","orcid":"0000-0001-6619-7502"},{"first_name":"Sara A.","orcid":"0000-0003-0807-0823","last_name":"Haddad","full_name":"Haddad, Sara A."},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","last_name":"Vogels"},{"last_name":"Greenberg","full_name":"Greenberg, David S.","first_name":"David S."},{"first_name":"Jakob H.","orcid":"0000-0001-5154-8912","last_name":"Macke","full_name":"Macke, Jakob H."}],"article_processing_charge":"No","volume":9,"oa":1,"date_updated":"2023-08-22T07:54:52Z","publication_identifier":{"eissn":["2050-084X"]},"_id":"8127","pmid":1,"oa_version":"Published 
Version","quality_controlled":"1","project":[{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"acknowledgement":"We thank Mahmood S Hoseini and Michael Stryker for sharing their data for Figure 2, and Philipp Berens, Sean Bittner, Jan Boelts, John Cunningham, Richard Gao, Scott Linderman, Eve Marder, Iain Murray, George Papamakarios, Astrid Prinz, Auguste Schulz and Srinivas Turaga for discussions and/or comments on the manuscript. This work was supported by the German Research Foundation (DFG) through SFB 1233 ‘Robust Vision’, (276693517), SFB 1089 ‘Synaptic Microcircuits’, SPP 2041 ‘Computational Connectomics’ and Germany's Excellence Strategy – EXC-Number 2064/1 – Project number 390727645 and the German Federal Ministry of Education and Research (BMBF, project ‘ADIMEM’, FKZ 01IS18052 A-D) to JHM, a Sir Henry Dale Fellowship by the Wellcome Trust and the Royal Society (WT100000; WFP and TPV), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z; TPV), a ERC Consolidator Grant (SYNAPSEEK; WPF and CC), and a UK Research and Innovation, Biotechnology and Biological Sciences Research Council (CC, UKRI-BBSRC BB/N019512/1). 
We gratefully acknowledge the Leibniz Supercomputing Centre for funding this project by providing computing time on its Linux-Cluster.","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8"},{"title":"A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network","ec_funded":1,"year":"2020","related_material":{"link":[{"relation":"is_continued_by","url":"https://doi.org/10.1101/2020.10.24.353409"}],"record":[{"id":"14422","relation":"dissertation_contains","status":"public"}]},"main_file_link":[{"open_access":"1","url":"https://proceedings.neurips.cc/paper/2020/hash/bdbd5ebfde4934142c8a88e7a3796cd5-Abstract.html"}],"author":[{"last_name":"Confavreux","full_name":"Confavreux, Basile J","first_name":"Basile J","id":"C7610134-B532-11EA-BD9F-F5753DDC885E"},{"first_name":"Friedemann","full_name":"Zenke, Friedemann","last_name":"Zenke"},{"full_name":"Agnes, Everton J.","last_name":"Agnes","first_name":"Everton J."},{"first_name":"Timothy","last_name":"Lillicrap","full_name":"Lillicrap, Timothy"},{"first_name":"Tim P","full_name":"Vogels, Tim P","last_name":"Vogels","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"abstract":[{"text":"The search for biologically faithful synaptic plasticity rules has resulted in a large body of models. They are usually inspired by – and fitted to – experimental data, but they rarely produce neural dynamics that serve complex functions. These failures suggest that current plasticity models are still under-constrained by existing data. Here, we present an alternative approach that uses meta-learning to discover plausible synaptic plasticity rules. Instead of experimental data, the rules are constrained by the functions they implement and the structure they are meant to produce. 
Briefly, we parameterize synaptic plasticity rules by a Volterra expansion and then use supervised learning methods (gradient descent or evolutionary strategies) to minimize a problem-dependent loss function that quantifies how effectively a candidate plasticity rule transforms an initially random network into one with the desired function. We first validate our approach by re-discovering previously described plasticity rules, starting at the single-neuron level and “Oja’s rule”, a simple Hebbian plasticity rule that captures the direction of most variability of inputs to a neuron (i.e., the first principal component). We expand the problem to the network level and ask the framework to find Oja’s rule together with an anti-Hebbian rule such that an initially random two-layer firing-rate network will recover several principal components of the input space after learning. Next, we move to networks of integrate-and-fire neurons with plastic inhibitory afferents. We train for rules that achieve a target firing rate by countering tuned excitation. Our algorithm discovers a specific subset of the manifold of rules that can solve this task. Our work is a proof of principle of an automated and unbiased approach to unveil synaptic plasticity rules that obey biological constraints and can solve complex functions.","lang":"eng"}],"publication_status":"published","citation":{"apa":"Confavreux, B. J., Zenke, F., Agnes, E. J., Lillicrap, T., &#38; Vogels, T. P. (2020). A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In <i>Advances in Neural Information Processing Systems</i> (Vol. 33, pp. 16398–16408). Vancouver, Canada.","ieee":"B. J. Confavreux, F. Zenke, E. J. Agnes, T. Lillicrap, and T. P. Vogels, “A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network,” in <i>Advances in Neural Information Processing Systems</i>, Vancouver, Canada, 2020, vol. 33, pp. 
16398–16408.","chicago":"Confavreux, Basile J, Friedemann Zenke, Everton J. Agnes, Timothy Lillicrap, and Tim P Vogels. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” In <i>Advances in Neural Information Processing Systems</i>, 33:16398–408, 2020.","mla":"Confavreux, Basile J., et al. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” <i>Advances in Neural Information Processing Systems</i>, vol. 33, 2020, pp. 16398–408.","ama":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In: <i>Advances in Neural Information Processing Systems</i>. Vol 33. ; 2020:16398-16408.","short":"B.J. Confavreux, F. Zenke, E.J. Agnes, T. Lillicrap, T.P. Vogels, in:, Advances in Neural Information Processing Systems, 2020, pp. 16398–16408.","ista":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. 2020. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. Advances in Neural Information Processing Systems. NeurIPS: Conference on Neural Information Processing Systems vol. 33, 16398–16408."},"user_id":"6785fbc1-c503-11eb-8a32-93094b40e1cf","acknowledgement":"We would like to thank Chaitanya Chintaluri, Georgia Christodoulou, Bill Podlaski and Merima Šabanovic for useful discussions and comments. This work was supported by a Wellcome Trust ´ Senior Research Fellowship (214316/Z/18/Z), a BBSRC grant (BB/N019512/1), an ERC consolidator Grant (SYNAPSEEK), a Leverhulme Trust Project Grant (RPG-2016-446), and funding from École Polytechnique, Paris.","project":[{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? 
Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"},{"grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"oa_version":"Published Version","quality_controlled":"1","_id":"9633","publication_identifier":{"issn":["1049-5258"]},"volume":33,"oa":1,"date_updated":"2023-10-18T09:20:55Z","article_processing_charge":"No","language":[{"iso":"eng"}],"scopus_import":"1","date_published":"2020-12-06T00:00:00Z","month":"12","date_created":"2021-07-04T22:01:27Z","conference":{"location":"Vancouver, Canada","end_date":"2020-12-12","name":"NeurIPS: Conference on Neural Information Processing Systems","start_date":"2020-12-06"},"department":[{"_id":"TiVo"}],"status":"public","intvolume":"        33","type":"conference","day":"06","page":"16398-16408","publication":"Advances in Neural Information Processing Systems"}]
