[{"year":"2021","oa":1,"page":"221-280","article_type":"original","publication":"Probability and Mathematical Physics","date_updated":"2024-02-19T08:30:00Z","date_published":"2021-05-21T00:00:00Z","external_id":{"arxiv":["1907.13631"]},"date_created":"2024-02-18T23:01:03Z","scopus_import":"1","department":[{"_id":"LaEr"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","quality_controlled":"1","type":"journal_article","language":[{"iso":"eng"}],"publisher":"Mathematical Sciences Publishers","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.1907.13631","open_access":"1"}],"intvolume":"         2","abstract":[{"lang":"eng","text":"We consider random n×n matrices X with independent and centered entries and a general variance profile. We show that the spectral radius of X converges with very high probability to the square root of the spectral radius of the variance matrix of X when n tends to infinity. We also establish the optimal rate of convergence, that is a new result even for general i.i.d. matrices beyond the explicitly solvable Gaussian cases. The main ingredient is the proof of the local inhomogeneous circular law [arXiv:1612.07776] at the spectral edge."}],"issue":"2","doi":"10.2140/pmp.2021.2.221","title":"Spectral radius of random matrices with independent entries","author":[{"first_name":"Johannes","id":"36D3D8B6-F248-11E8-B48F-1D18A9856A87","last_name":"Alt","full_name":"Alt, Johannes"},{"first_name":"László","id":"4DBD5372-F248-11E8-B48F-1D18A9856A87","last_name":"Erdös","full_name":"Erdös, László","orcid":"0000-0001-5366-9603"},{"last_name":"Krüger","full_name":"Krüger, Torben H","first_name":"Torben H","id":"3020C786-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0002-4821-3297"}],"_id":"15013","status":"public","publication_status":"published","acknowledgement":"Partially supported by ERC Starting Grant RandMat No. 715539 and the SwissMap grant of Swiss National Science Foundation. Partially supported by ERC Advanced Grant RanMat No. 338804. 
Partially supported by the Hausdorff Center for Mathematics in Bonn.","ec_funded":1,"publication_identifier":{"eissn":["2690-1005"],"issn":["2690-0998"]},"project":[{"_id":"258DCDE6-B435-11E9-9278-68D0E5697425","grant_number":"338804","call_identifier":"FP7","name":"Random matrices, universality and disordered quantum systems"}],"oa_version":"Preprint","citation":{"mla":"Alt, Johannes, et al. “Spectral Radius of Random Matrices with Independent Entries.” <i>Probability and Mathematical Physics</i>, vol. 2, no. 2, Mathematical Sciences Publishers, 2021, pp. 221–80, doi:<a href=\"https://doi.org/10.2140/pmp.2021.2.221\">10.2140/pmp.2021.2.221</a>.","chicago":"Alt, Johannes, László Erdös, and Torben H Krüger. “Spectral Radius of Random Matrices with Independent Entries.” <i>Probability and Mathematical Physics</i>. Mathematical Sciences Publishers, 2021. <a href=\"https://doi.org/10.2140/pmp.2021.2.221\">https://doi.org/10.2140/pmp.2021.2.221</a>.","ieee":"J. Alt, L. Erdös, and T. H. Krüger, “Spectral radius of random matrices with independent entries,” <i>Probability and Mathematical Physics</i>, vol. 2, no. 2. Mathematical Sciences Publishers, pp. 221–280, 2021.","apa":"Alt, J., Erdös, L., &#38; Krüger, T. H. (2021). Spectral radius of random matrices with independent entries. <i>Probability and Mathematical Physics</i>. Mathematical Sciences Publishers. <a href=\"https://doi.org/10.2140/pmp.2021.2.221\">https://doi.org/10.2140/pmp.2021.2.221</a>","short":"J. Alt, L. Erdös, T.H. Krüger, Probability and Mathematical Physics 2 (2021) 221–280.","ama":"Alt J, Erdös L, Krüger TH. Spectral radius of random matrices with independent entries. <i>Probability and Mathematical Physics</i>. 2021;2(2):221-280. doi:<a href=\"https://doi.org/10.2140/pmp.2021.2.221\">10.2140/pmp.2021.2.221</a>","ista":"Alt J, Erdös L, Krüger TH. 2021. Spectral radius of random matrices with independent entries. Probability and Mathematical Physics. 
2(2), 221–280."},"day":"21","month":"05","article_processing_charge":"No","volume":2,"arxiv":1},{"status":"public","related_material":{"record":[{"relation":"used_in_publication","status":"public","id":"9928"}]},"date_updated":"2023-08-11T10:44:21Z","year":"2021","abstract":[{"text":"This dataset comprises all data shown in the figures of the submitted article \"Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction\". Additional raw data are available from the corresponding author on reasonable request.","lang":"eng"}],"doi":"10.5281/ZENODO.5592103","author":[{"orcid":"0000-0002-3415-4628","full_name":"Peruzzo, Matilda","last_name":"Peruzzo","id":"3F920B30-F248-11E8-B48F-1D18A9856A87","first_name":"Matilda"},{"orcid":"0000-0001-6937-5773","full_name":"Hassani, Farid","last_name":"Hassani","id":"2AED110C-F248-11E8-B48F-1D18A9856A87","first_name":"Farid"},{"last_name":"Szep","full_name":"Szep, Grisha","first_name":"Grisha"},{"first_name":"Andrea","id":"42F71B44-F248-11E8-B48F-1D18A9856A87","last_name":"Trioni","full_name":"Trioni, Andrea"},{"first_name":"Elena","id":"2C21D6E8-F248-11E8-B48F-1D18A9856A87","last_name":"Redchenko","full_name":"Redchenko, Elena"},{"last_name":"Zemlicka","full_name":"Zemlicka, Martin","first_name":"Martin","id":"2DCF8DE6-F248-11E8-B48F-1D18A9856A87"},{"last_name":"Fink","full_name":"Fink, Johannes M","first_name":"Johannes M","id":"4B591CBA-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-8112-028X"}],"oa":1,"title":"Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction","_id":"13057","type":"research_data_reference","article_processing_charge":"No","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY 
(4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"main_file_link":[{"open_access":"1","url":"https://doi.org/10.5281/zenodo.5592104"}],"publisher":"Zenodo","date_published":"2021-10-22T00:00:00Z","oa_version":"Published Version","date_created":"2023-05-23T13:42:27Z","department":[{"_id":"JoFi"}],"day":"22","citation":{"chicago":"Peruzzo, Matilda, Farid Hassani, Grisha Szep, Andrea Trioni, Elena Redchenko, Martin Zemlicka, and Johannes M Fink. “Geometric Superinductance Qubits: Controlling Phase Delocalization across a Single Josephson Junction.” Zenodo, 2021. <a href=\"https://doi.org/10.5281/ZENODO.5592103\">https://doi.org/10.5281/ZENODO.5592103</a>.","mla":"Peruzzo, Matilda, et al. <i>Geometric Superinductance Qubits: Controlling Phase Delocalization across a Single Josephson Junction</i>. Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.5592103\">10.5281/ZENODO.5592103</a>.","apa":"Peruzzo, M., Hassani, F., Szep, G., Trioni, A., Redchenko, E., Zemlicka, M., &#38; Fink, J. M. (2021). Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction. Zenodo. <a href=\"https://doi.org/10.5281/ZENODO.5592103\">https://doi.org/10.5281/ZENODO.5592103</a>","ieee":"M. Peruzzo <i>et al.</i>, “Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction.” Zenodo, 2021.","ama":"Peruzzo M, Hassani F, Szep G, et al. Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction. 2021. doi:<a href=\"https://doi.org/10.5281/ZENODO.5592103\">10.5281/ZENODO.5592103</a>","ista":"Peruzzo M, Hassani F, Szep G, Trioni A, Redchenko E, Zemlicka M, Fink JM. 2021. Geometric superinductance qubits: Controlling phase delocalization across a single Josephson junction, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.5592103\">10.5281/ZENODO.5592103</a>.","short":"M. Peruzzo, F. Hassani, G. Szep, A. Trioni, E. Redchenko, M. 
Zemlicka, J.M. Fink, (2021)."},"ddc":["530"],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","month":"10"},{"publisher":"Zenodo","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"main_file_link":[{"url":"https://doi.org/10.5281/zenodo.5257161","open_access":"1"}],"article_processing_charge":"No","type":"research_data_reference","citation":{"ieee":"M. C. Ucar, “Source data for the manuscript ‘Theory of branching morphogenesis by local interactions and global guidance.’” Zenodo, 2021.","apa":"Ucar, M. C. (2021). Source data for the manuscript “Theory of branching morphogenesis by local interactions and global guidance.” Zenodo. <a href=\"https://doi.org/10.5281/ZENODO.5257160\">https://doi.org/10.5281/ZENODO.5257160</a>","ama":"Ucar MC. Source data for the manuscript “Theory of branching morphogenesis by local interactions and global guidance.” 2021. doi:<a href=\"https://doi.org/10.5281/ZENODO.5257160\">10.5281/ZENODO.5257160</a>","short":"M.C. Ucar, (2021).","ista":"Ucar MC. 2021. Source data for the manuscript ‘Theory of branching morphogenesis by local interactions and global guidance’, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.5257160\">10.5281/ZENODO.5257160</a>.","chicago":"Ucar, Mehmet C. “Source Data for the Manuscript ‘Theory of Branching Morphogenesis by Local Interactions and Global Guidance.’” Zenodo, 2021. <a href=\"https://doi.org/10.5281/ZENODO.5257160\">https://doi.org/10.5281/ZENODO.5257160</a>.","mla":"Ucar, Mehmet C. 
<i>Source Data for the Manuscript “Theory of Branching Morphogenesis by Local Interactions and Global Guidance.”</i> Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.5257160\">10.5281/ZENODO.5257160</a>."},"department":[{"_id":"EdHa"}],"day":"25","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","ddc":["570"],"month":"08","date_published":"2021-08-25T00:00:00Z","oa_version":"Published Version","date_created":"2023-05-23T13:46:34Z","date_updated":"2023-08-14T13:18:46Z","related_material":{"record":[{"status":"public","relation":"used_in_publication","id":"10402"}]},"status":"public","oa":1,"title":"Source data for the manuscript \"Theory of branching morphogenesis by local interactions and global guidance\"","author":[{"orcid":"0000-0003-0506-4217","full_name":"Ucar, Mehmet C","last_name":"Ucar","id":"50B2A802-6007-11E9-A42B-EB23E6697425","first_name":"Mehmet C"}],"_id":"13058","abstract":[{"lang":"eng","text":"The zip file includes source data used in the main text of the manuscript \"Theory of branching morphogenesis by local interactions and global guidance\", as well as a representative Jupyter notebook to reproduce the main figures. A sample script for the simulations of branching and annihilating random walks is also included (Sample_script_for_simulations_of_BARWs.ipynb) to generate exemplary branched networks under external guidance. A detailed description of the simulation setup is provided in the supplementary information of the manuscipt."}],"year":"2021","doi":"10.5281/ZENODO.5257160"},{"date_created":"2023-05-23T16:14:35Z","oa_version":"Published Version","date_published":"2021-10-29T00:00:00Z","license":"https://creativecommons.org/publicdomain/zero/1.0/","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","ddc":["570"],"month":"10","day":"29","department":[{"_id":"SyCr"}],"citation":{"apa":"Casillas Perez, B. E., Pull, C., Naiser, F., Naderlinger, E., Matas, J., &#38; Cremer, S. (2021). 
Early queen infection shapes developmental dynamics and induces long-term disease protection in incipient ant colonies. Dryad. <a href=\"https://doi.org/10.5061/DRYAD.7PVMCVDTJ\">https://doi.org/10.5061/DRYAD.7PVMCVDTJ</a>","ieee":"B. E. Casillas Perez, C. Pull, F. Naiser, E. Naderlinger, J. Matas, and S. Cremer, “Early queen infection shapes developmental dynamics and induces long-term disease protection in incipient ant colonies.” Dryad, 2021.","ama":"Casillas Perez BE, Pull C, Naiser F, Naderlinger E, Matas J, Cremer S. Early queen infection shapes developmental dynamics and induces long-term disease protection in incipient ant colonies. 2021. doi:<a href=\"https://doi.org/10.5061/DRYAD.7PVMCVDTJ\">10.5061/DRYAD.7PVMCVDTJ</a>","short":"B.E. Casillas Perez, C. Pull, F. Naiser, E. Naderlinger, J. Matas, S. Cremer, (2021).","ista":"Casillas Perez BE, Pull C, Naiser F, Naderlinger E, Matas J, Cremer S. 2021. Early queen infection shapes developmental dynamics and induces long-term disease protection in incipient ant colonies, Dryad, <a href=\"https://doi.org/10.5061/DRYAD.7PVMCVDTJ\">10.5061/DRYAD.7PVMCVDTJ</a>.","chicago":"Casillas Perez, Barbara E, Christopher Pull, Filip Naiser, Elisabeth Naderlinger, Jiri Matas, and Sylvia Cremer. “Early Queen Infection Shapes Developmental Dynamics and Induces Long-Term Disease Protection in Incipient Ant Colonies.” Dryad, 2021. <a href=\"https://doi.org/10.5061/DRYAD.7PVMCVDTJ\">https://doi.org/10.5061/DRYAD.7PVMCVDTJ</a>.","mla":"Casillas Perez, Barbara E., et al. <i>Early Queen Infection Shapes Developmental Dynamics and Induces Long-Term Disease Protection in Incipient Ant Colonies</i>. 
Dryad, 2021, doi:<a href=\"https://doi.org/10.5061/DRYAD.7PVMCVDTJ\">10.5061/DRYAD.7PVMCVDTJ</a>."},"type":"research_data_reference","article_processing_charge":"No","tmp":{"short":"CC0 (1.0)","legal_code_url":"https://creativecommons.org/publicdomain/zero/1.0/legalcode","image":"/images/cc_0.png","name":"Creative Commons Public Domain Dedication (CC0 1.0)"},"main_file_link":[{"open_access":"1","url":"https://doi.org/10.5061/dryad.7pvmcvdtj"}],"publisher":"Dryad","doi":"10.5061/DRYAD.7PVMCVDTJ","year":"2021","abstract":[{"text":"Infections early in life can have enduring effects on an organism’s development and immunity. In this study, we show that this equally applies to developing “superorganisms” – incipient social insect colonies. When we exposed newly mated Lasius niger ant queens to a low pathogen dose, their colonies grew more slowly than controls before winter, but reached similar sizes afterwards. Independent of exposure, queen hibernation survival improved when the ratio of pupae to workers was small. Queens that reared fewer pupae before worker emergence exhibited lower pathogen levels, indicating that high brood rearing efforts interfere with the ability of the queen’s immune system to suppress pathogen proliferation. Early-life queen pathogen-exposure also improved the immunocompetence of her worker offspring, as demonstrated by challenging the workers to the same pathogen a year later. 
Transgenerational transfer of the queen’s pathogen experience to her workforce can hence durably reduce the disease susceptibility of the whole superorganism.","lang":"eng"}],"_id":"13061","author":[{"full_name":"Casillas Perez, Barbara E","last_name":"Casillas Perez","id":"351ED2AA-F248-11E8-B48F-1D18A9856A87","first_name":"Barbara E"},{"orcid":"0000-0003-1122-3982","full_name":"Pull, Christopher","last_name":"Pull","id":"3C7F4840-F248-11E8-B48F-1D18A9856A87","first_name":"Christopher"},{"first_name":"Filip","last_name":"Naiser","full_name":"Naiser, Filip"},{"full_name":"Naderlinger, Elisabeth","last_name":"Naderlinger","first_name":"Elisabeth"},{"full_name":"Matas, Jiri","last_name":"Matas","first_name":"Jiri"},{"orcid":"0000-0002-2193-3868","last_name":"Cremer","full_name":"Cremer, Sylvia","first_name":"Sylvia","id":"2F64EC8C-F248-11E8-B48F-1D18A9856A87"}],"oa":1,"title":"Early queen infection shapes developmental dynamics and induces long-term disease protection in incipient ant colonies","ec_funded":1,"status":"public","related_material":{"record":[{"id":"10284","relation":"used_in_publication","status":"public"}]},"date_updated":"2023-08-14T11:45:28Z","project":[{"name":"Epidemics in ant societies on a chip","call_identifier":"H2020","grant_number":"771402","_id":"2649B4DE-B435-11E9-9278-68D0E5697425"}]},{"tmp":{"short":"CC0 (1.0)","legal_code_url":"https://creativecommons.org/publicdomain/zero/1.0/legalcode","image":"/images/cc_0.png","name":"Creative Commons Public Domain Dedication (CC0 1.0)"},"main_file_link":[{"url":"https://doi.org/10.5061/dryad.8gtht76p1","open_access":"1"}],"publisher":"Dryad","type":"research_data_reference","article_processing_charge":"No","department":[{"_id":"NiBa"}],"day":"02","citation":{"mla":"Szep, Eniko, et al. <i>Supplementary Code for: Polygenic Local Adaptation in Metapopulations: A Stochastic Eco-Evolutionary Model</i>. 
Dryad, 2021, doi:<a href=\"https://doi.org/10.5061/DRYAD.8GTHT76P1\">10.5061/DRYAD.8GTHT76P1</a>.","chicago":"Szep, Eniko, Himani Sachdeva, and Nicholas H Barton. “Supplementary Code for: Polygenic Local Adaptation in Metapopulations: A Stochastic Eco-Evolutionary Model.” Dryad, 2021. <a href=\"https://doi.org/10.5061/DRYAD.8GTHT76P1\">https://doi.org/10.5061/DRYAD.8GTHT76P1</a>.","ama":"Szep E, Sachdeva H, Barton NH. Supplementary code for: Polygenic local adaptation in metapopulations: A stochastic eco-evolutionary model. 2021. doi:<a href=\"https://doi.org/10.5061/DRYAD.8GTHT76P1\">10.5061/DRYAD.8GTHT76P1</a>","ista":"Szep E, Sachdeva H, Barton NH. 2021. Supplementary code for: Polygenic local adaptation in metapopulations: A stochastic eco-evolutionary model, Dryad, <a href=\"https://doi.org/10.5061/DRYAD.8GTHT76P1\">10.5061/DRYAD.8GTHT76P1</a>.","short":"E. Szep, H. Sachdeva, N.H. Barton, (2021).","apa":"Szep, E., Sachdeva, H., &#38; Barton, N. H. (2021). Supplementary code for: Polygenic local adaptation in metapopulations: A stochastic eco-evolutionary model. Dryad. <a href=\"https://doi.org/10.5061/DRYAD.8GTHT76P1\">https://doi.org/10.5061/DRYAD.8GTHT76P1</a>","ieee":"E. Szep, H. Sachdeva, and N. H. 
Barton, “Supplementary code for: Polygenic local adaptation in metapopulations: A stochastic eco-evolutionary model.” Dryad, 2021."},"ddc":["570"],"month":"03","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_published":"2021-03-02T00:00:00Z","oa_version":"Published Version","date_created":"2023-05-23T16:17:02Z","date_updated":"2023-09-05T15:44:05Z","status":"public","related_material":{"record":[{"relation":"used_in_publication","status":"public","id":"9252"}]},"author":[{"last_name":"Szep","full_name":"Szep, Eniko","first_name":"Eniko","id":"485BB5A4-F248-11E8-B48F-1D18A9856A87"},{"id":"42377A0A-F248-11E8-B48F-1D18A9856A87","first_name":"Himani","full_name":"Sachdeva, Himani","last_name":"Sachdeva"},{"orcid":"0000-0002-8548-5240","first_name":"Nicholas H","id":"4880FE40-F248-11E8-B48F-1D18A9856A87","last_name":"Barton","full_name":"Barton, Nicholas H"}],"title":"Supplementary code for: Polygenic local adaptation in metapopulations: A stochastic eco-evolutionary model","oa":1,"_id":"13062","year":"2021","abstract":[{"text":"This paper analyzes the conditions for local adaptation in a metapopulation with infinitely many islands under a model of hard selection, where population size depends on local fitness. Each island belongs to one of two distinct ecological niches or habitats. Fitness is influenced by an additive trait which is under habitat-dependent directional selection. Our analysis is based on the diffusion approximation and  accounts for both genetic drift and demographic stochasticity. By neglecting linkage disequilibria, it yields the joint distribution of allele frequencies and population size on each island. We find that under hard selection, the conditions for local adaptation in a rare habitat are more restrictive for more polygenic traits: even moderate migration load per locus at very many loci is sufficient for population sizes to decline. 
This further reduces the efficacy of selection at individual loci due to increased drift and because smaller populations are more prone to swamping due to migration, causing a positive feedback between increasing maladaptation and declining population sizes. Our analysis also highlights the importance of demographic stochasticity, which  exacerbates the decline in numbers of maladapted populations, leading to population collapse in the rare habitat at significantly lower migration than predicted by deterministic arguments.","lang":"eng"}],"doi":"10.5061/DRYAD.8GTHT76P1"},{"type":"research_data_reference","article_processing_charge":"No","tmp":{"short":"CC0 (1.0)","legal_code_url":"https://creativecommons.org/publicdomain/zero/1.0/legalcode","image":"/images/cc_0.png","name":"Creative Commons Public Domain Dedication (CC0 1.0)"},"main_file_link":[{"open_access":"1","url":"https://doi.org/10.5061/dryad.sqv9s4n51"}],"publisher":"Dryad","date_published":"2021-11-04T00:00:00Z","oa_version":"Published Version","date_created":"2023-05-23T16:20:16Z","department":[{"_id":"MaRo"}],"day":"04","citation":{"chicago":"Robinson, Matthew Richard. “Probabilistic Inference of the Genetic Architecture of Functional Enrichment of Complex Traits.” Dryad, 2021. <a href=\"https://doi.org/10.5061/dryad.sqv9s4n51\">https://doi.org/10.5061/dryad.sqv9s4n51</a>.","mla":"Robinson, Matthew Richard. <i>Probabilistic Inference of the Genetic Architecture of Functional Enrichment of Complex Traits</i>. Dryad, 2021, doi:<a href=\"https://doi.org/10.5061/dryad.sqv9s4n51\">10.5061/dryad.sqv9s4n51</a>.","ista":"Robinson MR. 2021. Probabilistic inference of the genetic architecture of functional enrichment of complex traits, Dryad, <a href=\"https://doi.org/10.5061/dryad.sqv9s4n51\">10.5061/dryad.sqv9s4n51</a>.","short":"M.R. Robinson, (2021).","ama":"Robinson MR. Probabilistic inference of the genetic architecture of functional enrichment of complex traits. 2021. 
doi:<a href=\"https://doi.org/10.5061/dryad.sqv9s4n51\">10.5061/dryad.sqv9s4n51</a>","ieee":"M. R. Robinson, “Probabilistic inference of the genetic architecture of functional enrichment of complex traits.” Dryad, 2021.","apa":"Robinson, M. R. (2021). Probabilistic inference of the genetic architecture of functional enrichment of complex traits. Dryad. <a href=\"https://doi.org/10.5061/dryad.sqv9s4n51\">https://doi.org/10.5061/dryad.sqv9s4n51</a>"},"month":"11","ddc":["570"],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","status":"public","related_material":{"record":[{"relation":"used_in_publication","status":"public","id":"8429"}],"link":[{"url":"https://github.com/medical-genomics-group/gmrm","relation":"software"}]},"date_updated":"2023-09-26T10:36:15Z","year":"2021","abstract":[{"text":"We develop a Bayesian model (BayesRR-RC) that provides robust SNP-heritability estimation, an alternative to marker discovery, and accurate genomic prediction, taking 22 seconds per iteration to estimate 8.4 million SNP-effects and 78 SNP-heritability parameters in the UK Biobank. We find that only $\\leq$ 10\\% of the genetic variation captured for height, body mass index, cardiovascular disease, and type 2 diabetes is attributable to proximal regulatory regions within 10kb upstream of genes, while 12-25% is attributed to coding regions, 32-44% to introns, and 22-28% to distal 10-500kb upstream regions. Up to 24% of all cis and coding regions of each chromosome are associated with each trait, with over 3,100 independent exonic and intronic regions and over 5,400 independent regulatory regions having &gt;95% probability of contributing &gt;0.001% to the genetic variance of these four traits. 
Our open-source software (GMRM) provides a scalable alternative to current approaches for biobank data.","lang":"eng"}],"doi":"10.5061/dryad.sqv9s4n51","author":[{"orcid":"0000-0001-8982-8813","first_name":"Matthew Richard","id":"E5D42276-F5DA-11E9-8E24-6303E6697425","last_name":"Robinson","full_name":"Robinson, Matthew Richard"}],"oa":1,"title":"Probabilistic inference of the genetic architecture of functional enrichment of complex traits","_id":"13063"},{"oa_version":"Published Version","date_created":"2023-05-23T16:39:24Z","date_published":"2021-07-30T00:00:00Z","month":"07","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","ddc":["570"],"department":[{"_id":"EdHa"}],"day":"30","citation":{"ama":"Randriamanantsoa S, Papargyriou A, Maurer C, et al. Spatiotemporal dynamics of self-organized branching in pancreas-derived organoids. 2021. doi:<a href=\"https://doi.org/10.5281/ZENODO.5148117\">10.5281/ZENODO.5148117</a>","ista":"Randriamanantsoa S, Papargyriou A, Maurer C, Peschke K, Schuster M, Zecchin G, Steiger K, Öllinger R, Saur D, Scheel C, Rad R, Hannezo EB, Reichert M, Bausch AR. 2021. Spatiotemporal dynamics of self-organized branching in pancreas-derived organoids, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.5148117\">10.5281/ZENODO.5148117</a>.","short":"S. Randriamanantsoa, A. Papargyriou, C. Maurer, K. Peschke, M. Schuster, G. Zecchin, K. Steiger, R. Öllinger, D. Saur, C. Scheel, R. Rad, E.B. Hannezo, M. Reichert, A.R. Bausch, (2021).","ieee":"S. Randriamanantsoa <i>et al.</i>, “Spatiotemporal dynamics of self-organized branching in pancreas-derived organoids.” Zenodo, 2021.","apa":"Randriamanantsoa, S., Papargyriou, A., Maurer, C., Peschke, K., Schuster, M., Zecchin, G., … Bausch, A. R. (2021). Spatiotemporal dynamics of self-organized branching in pancreas-derived organoids. Zenodo. 
<a href=\"https://doi.org/10.5281/ZENODO.5148117\">https://doi.org/10.5281/ZENODO.5148117</a>","chicago":"Randriamanantsoa, Samuel, Aristeidis Papargyriou, Carlo Maurer, Katja Peschke, Maximilian Schuster, Giulia Zecchin, Katja Steiger, et al. “Spatiotemporal Dynamics of Self-Organized Branching in Pancreas-Derived Organoids.” Zenodo, 2021. <a href=\"https://doi.org/10.5281/ZENODO.5148117\">https://doi.org/10.5281/ZENODO.5148117</a>.","mla":"Randriamanantsoa, Samuel, et al. <i>Spatiotemporal Dynamics of Self-Organized Branching in Pancreas-Derived Organoids</i>. Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.5148117\">10.5281/ZENODO.5148117</a>."},"type":"research_data_reference","article_processing_charge":"No","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"main_file_link":[{"url":"https://doi.org/10.5281/zenodo.6577226","open_access":"1"}],"publisher":"Zenodo","doi":"10.5281/ZENODO.5148117","year":"2021","abstract":[{"text":"Source data and source code for the graphs in \"Spatiotemporal dynamics of self-organized branching pancreatic cancer-derived organoids\".","lang":"eng"}],"_id":"13068","author":[{"first_name":"Samuel","last_name":"Randriamanantsoa","full_name":"Randriamanantsoa, Samuel"},{"first_name":"Aristeidis","last_name":"Papargyriou","full_name":"Papargyriou, Aristeidis"},{"first_name":"Carlo","full_name":"Maurer, Carlo","last_name":"Maurer"},{"last_name":"Peschke","full_name":"Peschke, Katja","first_name":"Katja"},{"full_name":"Schuster, Maximilian","last_name":"Schuster","first_name":"Maximilian"},{"first_name":"Giulia","full_name":"Zecchin, Giulia","last_name":"Zecchin"},{"full_name":"Steiger, Katja","last_name":"Steiger","first_name":"Katja"},{"last_name":"Öllinger","full_name":"Öllinger, Rupert","first_name":"Rupert"},{"first_name":"Dieter","full_name":"Saur, 
Dieter","last_name":"Saur"},{"full_name":"Scheel, Christina","last_name":"Scheel","first_name":"Christina"},{"first_name":"Roland","last_name":"Rad","full_name":"Rad, Roland"},{"id":"3A9DB764-F248-11E8-B48F-1D18A9856A87","first_name":"Edouard B","full_name":"Hannezo, Edouard B","last_name":"Hannezo","orcid":"0000-0001-6005-1561"},{"first_name":"Maximilian","full_name":"Reichert, Maximilian","last_name":"Reichert"},{"first_name":"Andreas R.","full_name":"Bausch, Andreas R.","last_name":"Bausch"}],"title":"Spatiotemporal dynamics of self-organized branching in pancreas-derived organoids","oa":1,"status":"public","related_material":{"record":[{"status":"public","relation":"used_in_publication","id":"12217"}]},"date_updated":"2023-08-04T09:25:23Z"},{"related_material":{"record":[{"id":"10322","relation":"used_in_publication","status":"public"}]},"status":"public","date_updated":"2023-08-14T11:53:26Z","doi":"10.5281/ZENODO.5519410","abstract":[{"lang":"eng","text":"To survive elevated temperatures, ectotherms adjust the fluidity of membranes by fine-tuning lipid desaturation levels in a process previously described to be cell-autonomous. We have discovered that, in Caenorhabditis elegans, neuronal Heat shock Factor 1 (HSF-1), the conserved master regulator of the heat shock response (HSR)- causes extensive fat remodelling in peripheral tissues. These changes include a decrease in fat desaturase and acid lipase expression in the intestine, and a global shift in the saturation levels of plasma membrane’s phospholipids. The observed remodelling of plasma membrane is in line with ectothermic adaptive responses and gives worms a cumulative advantage to warm temperatures. We have determined that at least six TAX-2/TAX-4 cGMP gated channel expressing sensory neurons and TGF-β/BMP are required for signalling across tissues to modulate fat desaturation. 
We also find neuronal hsf-1  is not only sufficient but also partially necessary to control the fat remodelling response and for survival at warm temperatures. This is the first study to show that a thermostat-based mechanism can cell non-autonomously coordinate membrane saturation and composition across tissues in a multicellular animal."}],"year":"2021","_id":"13069","oa":1,"title":"Neuronal HSF-1 coordinates the propagation of fat desaturation across tissues to enable adaptation to high temperatures in C. elegans","author":[{"full_name":"Chauve, Laetitia","last_name":"Chauve","first_name":"Laetitia"},{"first_name":"Francesca","full_name":"Hodge, Francesca","last_name":"Hodge"},{"first_name":"Sharlene","last_name":"Murdoch","full_name":"Murdoch, Sharlene"},{"first_name":"Fatemah","full_name":"Masoudzadeh, Fatemah","last_name":"Masoudzadeh"},{"full_name":"Mann, Harry-Jack","last_name":"Mann","first_name":"Harry-Jack"},{"full_name":"Lopez-Clavijo, Andrea","last_name":"Lopez-Clavijo","first_name":"Andrea"},{"last_name":"Okkenhaug","full_name":"Okkenhaug, Hanneke","first_name":"Hanneke"},{"last_name":"West","full_name":"West, Greg","first_name":"Greg"},{"last_name":"Sousa","full_name":"Sousa, Bebiana C.","first_name":"Bebiana C."},{"last_name":"Segonds-Pichon","full_name":"Segonds-Pichon, Anne","first_name":"Anne"},{"full_name":"Li, Cheryl","last_name":"Li","first_name":"Cheryl"},{"first_name":"Steven","full_name":"Wingett, Steven","last_name":"Wingett"},{"first_name":"Hermine","last_name":"Kienberger","full_name":"Kienberger, Hermine"},{"full_name":"Kleigrewe, Karin","last_name":"Kleigrewe","first_name":"Karin"},{"full_name":"de Bono, Mario","last_name":"de Bono","id":"4E3FF80E-F248-11E8-B48F-1D18A9856A87","first_name":"Mario","orcid":"0000-0001-8347-0443"},{"full_name":"Wakelam, Michael","last_name":"Wakelam","first_name":"Michael"},{"first_name":"Olivia","full_name":"Casanueva, 
Olivia","last_name":"Casanueva"}],"article_processing_charge":"No","type":"research_data_reference","publisher":"Zenodo","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"main_file_link":[{"url":"https://doi.org/10.5281/zenodo.5547464","open_access":"1"}],"oa_version":"Published Version","date_created":"2023-05-23T16:40:56Z","date_published":"2021-12-25T00:00:00Z","ddc":["570"],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","month":"12","citation":{"mla":"Chauve, Laetitia, et al. <i>Neuronal HSF-1 Coordinates the Propagation of Fat Desaturation across Tissues to Enable Adaptation to High Temperatures in C. Elegans</i>. Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.5519410\">10.5281/ZENODO.5519410</a>.","chicago":"Chauve, Laetitia, Francesca Hodge, Sharlene Murdoch, Fatemah Masoudzadeh, Harry-Jack Mann, Andrea Lopez-Clavijo, Hanneke Okkenhaug, et al. “Neuronal HSF-1 Coordinates the Propagation of Fat Desaturation across Tissues to Enable Adaptation to High Temperatures in C. Elegans.” Zenodo, 2021. <a href=\"https://doi.org/10.5281/ZENODO.5519410\">https://doi.org/10.5281/ZENODO.5519410</a>.","short":"L. Chauve, F. Hodge, S. Murdoch, F. Masoudzadeh, H.-J. Mann, A. Lopez-Clavijo, H. Okkenhaug, G. West, B.C. Sousa, A. Segonds-Pichon, C. Li, S. Wingett, H. Kienberger, K. Kleigrewe, M. de Bono, M. Wakelam, O. Casanueva, (2021).","ama":"Chauve L, Hodge F, Murdoch S, et al. Neuronal HSF-1 coordinates the propagation of fat desaturation across tissues to enable adaptation to high temperatures in C. elegans. 2021. 
doi:<a href=\"https://doi.org/10.5281/ZENODO.5519410\">10.5281/ZENODO.5519410</a>","ista":"Chauve L, Hodge F, Murdoch S, Masoudzadeh F, Mann H-J, Lopez-Clavijo A, Okkenhaug H, West G, Sousa BC, Segonds-Pichon A, Li C, Wingett S, Kienberger H, Kleigrewe K, de Bono M, Wakelam M, Casanueva O. 2021. Neuronal HSF-1 coordinates the propagation of fat desaturation across tissues to enable adaptation to high temperatures in C. elegans, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.5519410\">10.5281/ZENODO.5519410</a>.","apa":"Chauve, L., Hodge, F., Murdoch, S., Masoudzadeh, F., Mann, H.-J., Lopez-Clavijo, A., … Casanueva, O. (2021). Neuronal HSF-1 coordinates the propagation of fat desaturation across tissues to enable adaptation to high temperatures in C. elegans. Zenodo. <a href=\"https://doi.org/10.5281/ZENODO.5519410\">https://doi.org/10.5281/ZENODO.5519410</a>","ieee":"L. Chauve <i>et al.</i>, “Neuronal HSF-1 coordinates the propagation of fat desaturation across tissues to enable adaptation to high temperatures in C. elegans.” Zenodo, 2021."},"day":"25","department":[{"_id":"MaDe"}]},{"type":"research_data_reference","article_processing_charge":"No","tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"main_file_link":[{"open_access":"1","url":"https://doi.org/10.5281/zenodo.5794029"}],"publisher":"Zenodo","date_created":"2023-05-23T16:46:20Z","oa_version":"Published Version","date_published":"2021-12-20T00:00:00Z","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","ddc":["570"],"month":"12","department":[{"_id":"MaRo"}],"day":"20","citation":{"ama":"McCartney DL, Hillary RF, Conole EL, et al. Blood-based epigenome-wide analyses of cognitive abilities. 2021. 
doi:<a href=\"https://doi.org/10.5281/ZENODO.5794028\">10.5281/ZENODO.5794028</a>","ista":"McCartney DL, Hillary RF, Conole EL, Trejo Banos D, Gadd DA, Walker RM, Nangle C, Flaig R, Campbell A, Murray AD, Munoz Maniega S, del C Valdes-Hernandez M, Harris MA, Bastin ME, Wardlaw JM, Harris SE, Porteous DJ, Tucker-Drob EM, McIntosh AM, Evans KL, Deary IJ, Cox SR, Robinson MR, Marioni RE. 2021. Blood-based epigenome-wide analyses of cognitive abilities, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.5794028\">10.5281/ZENODO.5794028</a>.","short":"D.L. McCartney, R.F. Hillary, E.L. Conole, D. Trejo Banos, D.A. Gadd, R.M. Walker, C. Nangle, R. Flaig, A. Campbell, A.D. Murray, S. Munoz Maniega, M. del C Valdes-Hernandez, M.A. Harris, M.E. Bastin, J.M. Wardlaw, S.E. Harris, D.J. Porteous, E.M. Tucker-Drob, A.M. McIntosh, K.L. Evans, I.J. Deary, S.R. Cox, M.R. Robinson, R.E. Marioni, (2021).","ieee":"D. L. McCartney <i>et al.</i>, “Blood-based epigenome-wide analyses of cognitive abilities.” Zenodo, 2021.","apa":"McCartney, D. L., Hillary, R. F., Conole, E. L., Trejo Banos, D., Gadd, D. A., Walker, R. M., … Marioni, R. E. (2021). Blood-based epigenome-wide analyses of cognitive abilities. Zenodo. <a href=\"https://doi.org/10.5281/ZENODO.5794028\">https://doi.org/10.5281/ZENODO.5794028</a>","mla":"McCartney, Daniel L., et al. <i>Blood-Based Epigenome-Wide Analyses of Cognitive Abilities</i>. Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.5794028\">10.5281/ZENODO.5794028</a>.","chicago":"McCartney, Daniel L, Robert F Hillary, Eleanor LS Conole, Daniel Trejo Banos, Danni A Gadd, Rosie M Walker, Cliff Nangle, et al. “Blood-Based Epigenome-Wide Analyses of Cognitive Abilities.” Zenodo, 2021. 
<a href=\"https://doi.org/10.5281/ZENODO.5794028\">https://doi.org/10.5281/ZENODO.5794028</a>."},"status":"public","related_material":{"record":[{"id":"10702","status":"public","relation":"used_in_publication"}]},"date_updated":"2023-08-02T14:05:12Z","doi":"10.5281/ZENODO.5794028","year":"2021","abstract":[{"lang":"eng","text":"CpGs and corresponding mean weights for DNAm-based prediction of cognitive abilities (6 traits)"}],"_id":"13072","author":[{"first_name":"Daniel L","last_name":"McCartney","full_name":"McCartney, Daniel L"},{"first_name":"Robert F","last_name":"Hillary","full_name":"Hillary, Robert F"},{"first_name":"Eleanor LS","full_name":"Conole, Eleanor LS","last_name":"Conole"},{"first_name":"Daniel","last_name":"Trejo Banos","full_name":"Trejo Banos, Daniel"},{"first_name":"Danni A","full_name":"Gadd, Danni A","last_name":"Gadd"},{"first_name":"Rosie M","full_name":"Walker, Rosie M","last_name":"Walker"},{"full_name":"Nangle, Cliff","last_name":"Nangle","first_name":"Cliff"},{"first_name":"Robin","full_name":"Flaig, Robin","last_name":"Flaig"},{"last_name":"Campbell","full_name":"Campbell, Archie","first_name":"Archie"},{"last_name":"Murray","full_name":"Murray, Alison D","first_name":"Alison D"},{"full_name":"Munoz Maniega, Susana","last_name":"Munoz Maniega","first_name":"Susana"},{"full_name":"del C Valdes-Hernandez, Maria","last_name":"del C Valdes-Hernandez","first_name":"Maria"},{"last_name":"Harris","full_name":"Harris, Mathew A","first_name":"Mathew A"},{"first_name":"Mark E","full_name":"Bastin, Mark E","last_name":"Bastin"},{"full_name":"Wardlaw, Joanna M","last_name":"Wardlaw","first_name":"Joanna M"},{"last_name":"Harris","full_name":"Harris, Sarah E","first_name":"Sarah E"},{"full_name":"Porteous, David J","last_name":"Porteous","first_name":"David J"},{"first_name":"Elliot M","full_name":"Tucker-Drob, Elliot M","last_name":"Tucker-Drob"},{"first_name":"Andrew M","full_name":"McIntosh, Andrew 
M","last_name":"McIntosh"},{"first_name":"Kathryn L","last_name":"Evans","full_name":"Evans, Kathryn L"},{"first_name":"Ian J","full_name":"Deary, Ian J","last_name":"Deary"},{"full_name":"Cox, Simon R","last_name":"Cox","first_name":"Simon R"},{"first_name":"Matthew Richard","id":"E5D42276-F5DA-11E9-8E24-6303E6697425","last_name":"Robinson","full_name":"Robinson, Matthew Richard","orcid":"0000-0001-8982-8813"},{"first_name":"Riccardo E","last_name":"Marioni","full_name":"Marioni, Riccardo E"}],"title":"Blood-based epigenome-wide analyses of cognitive abilities","oa":1},{"doi":"10.5281/ZENODO.4592435","abstract":[{"text":"Data for the manuscript 'Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire' ([2006.01275] Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire (arxiv.org))\r\n\r\nWe upload a pdf with extended data sets, and the raw data for these extended datasets as well.","lang":"eng"}],"year":"2021","_id":"13080","title":"Data for 'Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire","oa":1,"author":[{"last_name":"Puglia","full_name":"Puglia, Denise","first_name":"Denise","id":"4D495994-AE37-11E9-AC72-31CAE5697425"},{"last_name":"Martinez","full_name":"Martinez, Esteban","first_name":"Esteban"},{"full_name":"Menard, Gerbold","last_name":"Menard","first_name":"Gerbold"},{"full_name":"Pöschl, Andreas","last_name":"Pöschl","first_name":"Andreas"},{"first_name":"Sergei","last_name":"Gronin","full_name":"Gronin, Sergei"},{"last_name":"Gardner","full_name":"Gardner, Geoffrey","first_name":"Geoffrey"},{"first_name":"Ray","last_name":"Kallaher","full_name":"Kallaher, Ray"},{"full_name":"Manfra, Michael","last_name":"Manfra","first_name":"Michael"},{"first_name":"Charles","full_name":"Marcus, Charles","last_name":"Marcus"},{"orcid":"0000-0003-2607-2363","first_name":"Andrew P","id":"4AD6785A-F248-11E8-B48F-1D18A9856A87","last_name":"Higginbotham","full_name":"Higginbotham, Andrew 
P"},{"last_name":"Casparis","full_name":"Casparis, Lucas","first_name":"Lucas"}],"related_material":{"link":[{"relation":"software","url":"https://github.com/caslu85/Induced-Gap-Closing-Shared/tree/1.1.3"}],"record":[{"id":"9570","status":"public","relation":"used_in_publication"}]},"status":"public","date_updated":"2023-08-08T14:08:07Z","oa_version":"Published Version","date_created":"2023-05-23T17:11:28Z","date_published":"2021-03-09T00:00:00Z","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","month":"03","ddc":["530"],"citation":{"mla":"Puglia, Denise, et al. <i>Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire</i>. Zenodo, 2021, doi:<a href=\"https://doi.org/10.5281/ZENODO.4592435\">10.5281/ZENODO.4592435</a>.","chicago":"Puglia, Denise, Esteban Martinez, Gerbold Menard, Andreas Pöschl, Sergei Gronin, Geoffrey Gardner, Ray Kallaher, et al. “Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire.” Zenodo, 2021. <a href=\"https://doi.org/10.5281/ZENODO.4592435\">https://doi.org/10.5281/ZENODO.4592435</a>.","ama":"Puglia D, Martinez E, Menard G, et al. Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire. 2021. doi:<a href=\"https://doi.org/10.5281/ZENODO.4592435\">10.5281/ZENODO.4592435</a>","ista":"Puglia D, Martinez E, Menard G, Pöschl A, Gronin S, Gardner G, Kallaher R, Manfra M, Marcus C, Higginbotham AP, Casparis L. 2021. Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.4592435\">10.5281/ZENODO.4592435</a>.","short":"D. Puglia, E. Martinez, G. Menard, A. Pöschl, S. Gronin, G. Gardner, R. Kallaher, M. Manfra, C. Marcus, A.P. Higginbotham, L. Casparis, (2021).","apa":"Puglia, D., Martinez, E., Menard, G., Pöschl, A., Gronin, S., Gardner, G., … Casparis, L. (2021). Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire. Zenodo. 
<a href=\"https://doi.org/10.5281/ZENODO.4592435\">https://doi.org/10.5281/ZENODO.4592435</a>","ieee":"D. Puglia <i>et al.</i>, “Data for ’Closing of the Induced Gap in a Hybrid Superconductor-Semiconductor Nanowire.” Zenodo, 2021."},"day":"09","department":[{"_id":"AnHi"}],"article_processing_charge":"No","type":"research_data_reference","publisher":"Zenodo","main_file_link":[{"url":"https://doi.org/10.5281/zenodo.4592460","open_access":"1"}]},{"date_published":"2021-07-01T00:00:00Z","external_id":{"arxiv":["2012.11654"]},"scopus_import":"1","date_created":"2023-06-18T22:00:48Z","department":[{"_id":"MaMo"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","type":"conference","language":[{"iso":"eng"}],"quality_controlled":"1","intvolume":"       139","publisher":"ML Research Press","year":"2021","oa":1,"page":"8119-8129","publication":"Proceedings of the 38th International Conference on Machine Learning","date_updated":"2024-09-10T13:03:17Z","conference":{"name":"International Conference on Machine Learning","start_date":"2021-07-18","location":"Virtual","end_date":"2021-07-24"},"oa_version":"Published Version","day":"01","citation":{"apa":"Nguyen, Q., Mondelli, M., &#38; Montufar, G. (2021). Tight bounds on the smallest Eigenvalue of the neural tangent kernel for deep ReLU networks. In <i>Proceedings of the 38th International Conference on Machine Learning</i> (Vol. 139, pp. 8119–8129). Virtual: ML Research Press.","ieee":"Q. Nguyen, M. Mondelli, and G. Montufar, “Tight bounds on the smallest Eigenvalue of the neural tangent kernel for deep ReLU networks,” in <i>Proceedings of the 38th International Conference on Machine Learning</i>, Virtual, 2021, vol. 139, pp. 8119–8129.","ama":"Nguyen Q, Mondelli M, Montufar G. Tight bounds on the smallest Eigenvalue of the neural tangent kernel for deep ReLU networks. In: <i>Proceedings of the 38th International Conference on Machine Learning</i>. Vol 139. 
ML Research Press; 2021:8119-8129.","ista":"Nguyen Q, Mondelli M, Montufar G. 2021. Tight bounds on the smallest Eigenvalue of the neural tangent kernel for deep ReLU networks. Proceedings of the 38th International Conference on Machine Learning. International Conference on Machine Learning vol. 139, 8119–8129.","short":"Q. Nguyen, M. Mondelli, G. Montufar, in:, Proceedings of the 38th International Conference on Machine Learning, ML Research Press, 2021, pp. 8119–8129.","chicago":"Nguyen, Quynh, Marco Mondelli, and Guido Montufar. “Tight Bounds on the Smallest Eigenvalue of the Neural Tangent Kernel for Deep ReLU Networks.” In <i>Proceedings of the 38th International Conference on Machine Learning</i>, 139:8119–29. ML Research Press, 2021.","mla":"Nguyen, Quynh, et al. “Tight Bounds on the Smallest Eigenvalue of the Neural Tangent Kernel for Deep ReLU Networks.” <i>Proceedings of the 38th International Conference on Machine Learning</i>, vol. 139, ML Research Press, 2021, pp. 8119–29."},"month":"07","file_date_updated":"2023-06-19T10:49:12Z","ddc":["000"],"has_accepted_license":"1","article_processing_charge":"No","volume":139,"tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"file":[{"file_id":"13155","date_created":"2023-06-19T10:49:12Z","file_name":"2021_PMLR_Nguyen.pdf","relation":"main_file","creator":"dernst","file_size":591332,"access_level":"open_access","content_type":"application/pdf","checksum":"19489cf5e16a0596b1f92e317d97c9b0","success":1,"date_updated":"2023-06-19T10:49:12Z"}],"arxiv":1,"abstract":[{"text":"A recent line of work has analyzed the theoretical properties of deep neural networks via the Neural Tangent Kernel (NTK). 
In particular, the smallest eigenvalue of the NTK has been related to the memorization capacity, the global convergence of gradient descent algorithms and the generalization of deep nets. However, existing results either provide bounds in the two-layer setting or assume that the spectrum of the NTK matrices is bounded away from 0 for multi-layer networks. In this paper, we provide tight bounds on the smallest eigenvalue of NTK matrices for deep ReLU nets, both in the limiting case of infinite widths and for finite widths. In the finite-width setting, the network architectures we consider are fairly general: we require the existence of a wide layer with roughly order of N neurons, N being the number of data samples; and the scaling of the remaining layer widths is arbitrary (up to logarithmic factors). To obtain our results, we analyze various quantities of independent interest: we give lower bounds on the smallest singular value of hidden feature matrices, and upper bounds on the Lipschitz constant of input-output feature maps.","lang":"eng"}],"author":[{"first_name":"Quynh","full_name":"Nguyen, Quynh","last_name":"Nguyen"},{"last_name":"Mondelli","full_name":"Mondelli, Marco","first_name":"Marco","id":"27EB676C-8706-11E9-9510-7717E6697425","orcid":"0000-0002-3242-7020"},{"first_name":"Guido","last_name":"Montufar","full_name":"Montufar, Guido"}],"title":"Tight bounds on the smallest Eigenvalue of the neural tangent kernel for deep ReLU networks","_id":"13146","acknowledgement":"The authors would like to thank the anonymous reviewers for their helpful comments. MM was partially supported by the 2019 Lopez-Loreta Prize. 
QN and GM acknowledge support from the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement no 757983).","status":"public","publication_status":"published","publication_identifier":{"eissn":["2640-3498"],"isbn":["9781713845065"]},"project":[{"_id":"059876FA-7A3F-11EA-A408-12923DDC885E","name":"Prix Lopez-Loretta 2019 - Marco Mondelli"}]},{"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","department":[{"_id":"DaAl"}],"date_created":"2023-06-18T22:00:48Z","scopus_import":"1","date_published":"2021-07-01T00:00:00Z","external_id":{"arxiv":["2102.07214"]},"publisher":"ML Research Press","intvolume":"       139","quality_controlled":"1","type":"conference","language":[{"iso":"eng"}],"oa":1,"year":"2021","date_updated":"2023-06-19T10:44:38Z","publication":"Proceedings of the 38th International Conference on Machine Learning","page":"196-206","month":"07","file_date_updated":"2023-06-19T10:41:05Z","ddc":["000"],"citation":{"mla":"Alimisis, Foivos, et al. “Communication-Efficient Distributed Optimization with Quantized Preconditioners.” <i>Proceedings of the 38th International Conference on Machine Learning</i>, vol. 139, ML Research Press, 2021, pp. 196–206.","chicago":"Alimisis, Foivos, Peter Davies, and Dan-Adrian Alistarh. “Communication-Efficient Distributed Optimization with Quantized Preconditioners.” In <i>Proceedings of the 38th International Conference on Machine Learning</i>, 139:196–206. ML Research Press, 2021.","apa":"Alimisis, F., Davies, P., &#38; Alistarh, D.-A. (2021). Communication-efficient distributed optimization with quantized preconditioners. In <i>Proceedings of the 38th International Conference on Machine Learning</i> (Vol. 139, pp. 196–206). Virtual: ML Research Press.","ieee":"F. Alimisis, P. Davies, and D.-A. 
Alistarh, “Communication-efficient distributed optimization with quantized preconditioners,” in <i>Proceedings of the 38th International Conference on Machine Learning</i>, Virtual, 2021, vol. 139, pp. 196–206.","short":"F. Alimisis, P. Davies, D.-A. Alistarh, in:, Proceedings of the 38th International Conference on Machine Learning, ML Research Press, 2021, pp. 196–206.","ista":"Alimisis F, Davies P, Alistarh D-A. 2021. Communication-efficient distributed optimization with quantized preconditioners. Proceedings of the 38th International Conference on Machine Learning. International Conference on Machine Learning vol. 139, 196–206.","ama":"Alimisis F, Davies P, Alistarh D-A. Communication-efficient distributed optimization with quantized preconditioners. In: <i>Proceedings of the 38th International Conference on Machine Learning</i>. Vol 139. ML Research Press; 2021:196-206."},"day":"01","oa_version":"Published Version","conference":{"end_date":"2021-07-24","location":"Virtual","start_date":"2021-07-18","name":"International Conference on Machine Learning"},"arxiv":1,"tmp":{"name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","image":"/images/cc_by.png","short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode"},"file":[{"date_created":"2023-06-19T10:41:05Z","file_id":"13154","relation":"main_file","file_size":429087,"creator":"dernst","file_name":"2021_PMLR_Alimisis.pdf","checksum":"7ec0d59bac268b49c76bf2e036dedd7a","content_type":"application/pdf","access_level":"open_access","date_updated":"2023-06-19T10:41:05Z","success":1}],"volume":139,"article_processing_charge":"No","has_accepted_license":"1","_id":"13147","title":"Communication-efficient distributed optimization with quantized preconditioners","author":[{"first_name":"Foivos","last_name":"Alimisis","full_name":"Alimisis, 
Foivos"},{"orcid":"0000-0002-5646-9524","first_name":"Peter","id":"11396234-BB50-11E9-B24C-90FCE5697425","last_name":"Davies","full_name":"Davies, Peter"},{"first_name":"Dan-Adrian","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","last_name":"Alistarh","full_name":"Alistarh, Dan-Adrian","orcid":"0000-0003-3650-940X"}],"abstract":[{"text":"We investigate fast and communication-efficient algorithms for the classic problem of minimizing a sum of strongly convex and smooth functions that are distributed among n\r\n different nodes, which can communicate using a limited number of bits. Most previous communication-efficient approaches for this problem are limited to first-order optimization, and therefore have \\emph{linear} dependence on the condition number in their communication complexity. We show that this dependence is not inherent: communication-efficient methods can in fact have sublinear dependence on the condition number. For this, we design and analyze the first communication-efficient distributed variants of preconditioned gradient descent for Generalized Linear Models, and for Newton’s method. Our results rely on a new technique for quantizing both the preconditioner and the descent direction at each step of the algorithms, while controlling their convergence rate. 
We also validate our findings experimentally, showing faster convergence and reduced communication relative to previous methods.","lang":"eng"}],"project":[{"call_identifier":"H2020","name":"Elastic Coordination for Scalable Machine Learning","_id":"268A44D6-B435-11E9-9278-68D0E5697425","grant_number":"805223"},{"_id":"260C2330-B435-11E9-9278-68D0E5697425","grant_number":"754411","call_identifier":"H2020","name":"ISTplus - Postdoctoral Fellowships"}],"publication_identifier":{"isbn":["9781713845065"],"eissn":["2640-3498"]},"ec_funded":1,"publication_status":"published","status":"public","acknowledgement":"The authors would like to thank Janne Korhonen, Aurelien Lucchi, Celestine Mendler-Dünner and Antonio Orvieto for helpful discussions. FA and DA were supported during this work by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 805223 ScaleML). PD was supported by the European Union’s Horizon 2020 programme under the Marie Skłodowska-Curie grant agreement No. 754411."},{"main_file_link":[{"url":"https://doi.org/10.1109/JPROC.2021.3058954","open_access":"1"}],"intvolume":"       109","publisher":"Institute of Electrical and Electronics Engineers","language":[{"iso":"eng"}],"type":"journal_article","quality_controlled":"1","department":[{"_id":"FrLo"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","extern":"1","date_published":"2021-05-01T00:00:00Z","external_id":{"arxiv":["2102.11107"]},"scopus_import":"1","date_created":"2023-08-21T12:19:30Z","publication":"Proceedings of the IEEE","date_updated":"2023-09-11T11:43:35Z","article_type":"original","page":"612-634","oa":1,"year":"2021","keyword":["Electrical and Electronic Engineering"],"arxiv":1,"article_processing_charge":"No","volume":109,"day":"01","citation":{"ieee":"B. Scholkopf <i>et al.</i>, “Toward causal representation learning,” <i>Proceedings of the IEEE</i>, vol. 109, no. 5. 
Institute of Electrical and Electronics Engineers, pp. 612–634, 2021.","apa":"Scholkopf, B., Locatello, F., Bauer, S., Ke, N. R., Kalchbrenner, N., Goyal, A., &#38; Bengio, Y. (2021). Toward causal representation learning. <i>Proceedings of the IEEE</i>. Institute of Electrical and Electronics Engineers. <a href=\"https://doi.org/10.1109/jproc.2021.3058954\">https://doi.org/10.1109/jproc.2021.3058954</a>","ama":"Scholkopf B, Locatello F, Bauer S, et al. Toward causal representation learning. <i>Proceedings of the IEEE</i>. 2021;109(5):612-634. doi:<a href=\"https://doi.org/10.1109/jproc.2021.3058954\">10.1109/jproc.2021.3058954</a>","ista":"Scholkopf B, Locatello F, Bauer S, Ke NR, Kalchbrenner N, Goyal A, Bengio Y. 2021. Toward causal representation learning. Proceedings of the IEEE. 109(5), 612–634.","short":"B. Scholkopf, F. Locatello, S. Bauer, N.R. Ke, N. Kalchbrenner, A. Goyal, Y. Bengio, Proceedings of the IEEE 109 (2021) 612–634.","chicago":"Scholkopf, Bernhard, Francesco Locatello, Stefan Bauer, Nan Rosemary Ke, Nal Kalchbrenner, Anirudh Goyal, and Yoshua Bengio. “Toward Causal Representation Learning.” <i>Proceedings of the IEEE</i>. Institute of Electrical and Electronics Engineers, 2021. <a href=\"https://doi.org/10.1109/jproc.2021.3058954\">https://doi.org/10.1109/jproc.2021.3058954</a>.","mla":"Scholkopf, Bernhard, et al. “Toward Causal Representation Learning.” <i>Proceedings of the IEEE</i>, vol. 109, no. 5, Institute of Electrical and Electronics Engineers, 2021, pp. 
612–34, doi:<a href=\"https://doi.org/10.1109/jproc.2021.3058954\">10.1109/jproc.2021.3058954</a>."},"month":"05","oa_version":"Published Version","publication_identifier":{"eissn":["1558-2256"],"issn":["0018-9219"]},"status":"public","publication_status":"published","author":[{"last_name":"Scholkopf","full_name":"Scholkopf, Bernhard","first_name":"Bernhard"},{"first_name":"Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","last_name":"Locatello","full_name":"Locatello, Francesco","orcid":"0000-0002-4850-0683"},{"full_name":"Bauer, Stefan","last_name":"Bauer","first_name":"Stefan"},{"first_name":"Nan Rosemary","full_name":"Ke, Nan Rosemary","last_name":"Ke"},{"full_name":"Kalchbrenner, Nal","last_name":"Kalchbrenner","first_name":"Nal"},{"first_name":"Anirudh","full_name":"Goyal, Anirudh","last_name":"Goyal"},{"last_name":"Bengio","full_name":"Bengio, Yoshua","first_name":"Yoshua"}],"title":"Toward causal representation learning","_id":"14117","issue":"5","abstract":[{"lang":"eng","text":"The two fields of machine learning and graphical causality arose and are developed separately. However, there is, now, cross-pollination and increasing interest in both fields to benefit from the advances of the other. In this article, we review fundamental concepts of causal inference and relate them to crucial open problems of machine learning, including transfer and generalization, thereby assaying how causality can contribute to modern machine learning research. This also applies in the opposite direction: we note that most work in causality starts from the premise that the causal variables are given. A central problem for AI and causality is, thus, causal representation learning, that is, the discovery of high-level causal variables from low-level observations. 
Finally, we delineate some implications of causality for machine learning and propose key research areas at the intersection of both communities."}],"doi":"10.1109/jproc.2021.3058954"},{"page":"11964-11974","publication":"Proceedings of 38th International Conference on Machine Learning","date_updated":"2023-09-11T10:16:55Z","alternative_title":["PMLR"],"year":"2021","oa":1,"quality_controlled":"1","language":[{"iso":"eng"}],"type":"conference","publisher":"ML Research Press","intvolume":"       139","main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/2106.05142"}],"extern":"1","external_id":{"arxiv":["2106.05142"]},"date_published":"2021-08-01T00:00:00Z","date_created":"2023-08-22T14:03:04Z","scopus_import":"1","department":[{"_id":"FrLo"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","publication_status":"published","status":"public","abstract":[{"lang":"eng","text":"Intensive care units (ICU) are increasingly looking towards machine learning for methods to provide online monitoring of critically ill patients. In machine learning, online monitoring is often formulated as a supervised learning problem. Recently, contrastive learning approaches have demonstrated promising improvements over competitive supervised benchmarks. These methods rely on well-understood data augmentation techniques developed for image data which do not apply to online monitoring. In this work, we overcome this limitation by\r\nsupplementing time-series data augmentation techniques with a novel contrastive\r\nlearning objective which we call neighborhood contrastive learning (NCL). Our objective explicitly groups together contiguous time segments from each patient while maintaining state-specific information. 
Our experiments demonstrate a marked improvement over existing work applying contrastive methods to medical time-series."}],"title":"Neighborhood contrastive learning applied to online patient monitoring","author":[{"first_name":"Hugo","full_name":"Yèche, Hugo","last_name":"Yèche"},{"first_name":"Gideon","last_name":"Dresdner","full_name":"Dresdner, Gideon"},{"last_name":"Locatello","full_name":"Locatello, Francesco","first_name":"Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","orcid":"0000-0002-4850-0683"},{"full_name":"Hüser, Matthias","last_name":"Hüser","first_name":"Matthias"},{"last_name":"Rätsch","full_name":"Rätsch, Gunnar","first_name":"Gunnar"}],"_id":"14176","volume":139,"article_processing_charge":"No","arxiv":1,"oa_version":"Preprint","conference":{"end_date":"2021-07-24","location":"Virtual","start_date":"2021-07-18","name":"International Conference on Machine Learning"},"citation":{"ama":"Yèche H, Dresdner G, Locatello F, Hüser M, Rätsch G. Neighborhood contrastive learning applied to online patient monitoring. In: <i>Proceedings of 38th International Conference on Machine Learning</i>. Vol 139. ML Research Press; 2021:11964-11974.","ista":"Yèche H, Dresdner G, Locatello F, Hüser M, Rätsch G. 2021. Neighborhood contrastive learning applied to online patient monitoring. Proceedings of 38th International Conference on Machine Learning. International Conference on Machine Learning, PMLR, vol. 139, 11964–11974.","short":"H. Yèche, G. Dresdner, F. Locatello, M. Hüser, G. Rätsch, in:, Proceedings of 38th International Conference on Machine Learning, ML Research Press, 2021, pp. 11964–11974.","ieee":"H. Yèche, G. Dresdner, F. Locatello, M. Hüser, and G. Rätsch, “Neighborhood contrastive learning applied to online patient monitoring,” in <i>Proceedings of 38th International Conference on Machine Learning</i>, Virtual, 2021, vol. 139, pp. 11964–11974.","apa":"Yèche, H., Dresdner, G., Locatello, F., Hüser, M., &#38; Rätsch, G. (2021). 
Neighborhood contrastive learning applied to online patient monitoring. In <i>Proceedings of 38th International Conference on Machine Learning</i> (Vol. 139, pp. 11964–11974). Virtual: ML Research Press.","mla":"Yèche, Hugo, et al. “Neighborhood Contrastive Learning Applied to Online Patient Monitoring.” <i>Proceedings of 38th International Conference on Machine Learning</i>, vol. 139, ML Research Press, 2021, pp. 11964–74.","chicago":"Yèche, Hugo, Gideon Dresdner, Francesco Locatello, Matthias Hüser, and Gunnar Rätsch. “Neighborhood Contrastive Learning Applied to Online Patient Monitoring.” In <i>Proceedings of 38th International Conference on Machine Learning</i>, 139:11964–74. ML Research Press, 2021."},"day":"01","month":"08"},{"date_updated":"2023-09-11T10:18:48Z","publication":"Proceedings of the 38th International Conference on Machine Learning","page":"10401-10412","oa":1,"alternative_title":["PMLR"],"year":"2021","intvolume":"       139","main_file_link":[{"url":"https://arxiv.org/abs/2006.07886","open_access":"1"}],"publisher":"ML Research Press","language":[{"iso":"eng"}],"quality_controlled":"1","type":"conference","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","department":[{"_id":"FrLo"}],"scopus_import":"1","date_created":"2023-08-22T14:03:47Z","date_published":"2021-08-01T00:00:00Z","extern":"1","external_id":{"arxiv":["2006.07886"]},"publication_status":"published","status":"public","_id":"14177","author":[{"first_name":"Frederik","last_name":"Träuble","full_name":"Träuble, Frederik"},{"first_name":"Elliot","full_name":"Creager, Elliot","last_name":"Creager"},{"full_name":"Kilbertus, Niki","last_name":"Kilbertus","first_name":"Niki"},{"orcid":"0000-0002-4850-0683","first_name":"Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","last_name":"Locatello","full_name":"Locatello, Francesco"},{"last_name":"Dittadi","full_name":"Dittadi, Andrea","first_name":"Andrea"},{"full_name":"Goyal, Anirudh","last_name":"Goyal","first_name":"Anirudh"},{"full_name":"Schölkopf, Bernhard","last_name":"Schölkopf","first_name":"Bernhard"},{"first_name":"Stefan","full_name":"Bauer, Stefan","last_name":"Bauer"}],"title":"On disentangled representations learned from correlated data","abstract":[{"lang":"eng","text":"The focus of disentanglement approaches has been on identifying independent factors of variation in data. However, the causal variables underlying real-world observations are often not statistically independent. In this work, we bridge the gap to real-world scenarios by analyzing the behavior of the most prominent disentanglement approaches on correlated data in a large-scale empirical study (including 4260 models). We show and quantify that systematically induced correlations in the dataset are being learned and reflected in the latent representations, which has implications for downstream applications of disentanglement such as fairness. We also demonstrate how to resolve these latent correlations, either using weak supervision during\r\ntraining or by post-hoc correcting a pre-trained model with a small number of labels."}],"arxiv":1,"volume":139,"article_processing_charge":"No","month":"08","day":"01","citation":{"mla":"Träuble, Frederik, et al. “On Disentangled Representations Learned from Correlated Data.” <i>Proceedings of the 38th International Conference on Machine Learning</i>, vol. 139, ML Research Press, 2021, pp. 10401–12.","chicago":"Träuble, Frederik, Elliot Creager, Niki Kilbertus, Francesco Locatello, Andrea Dittadi, Anirudh Goyal, Bernhard Schölkopf, and Stefan Bauer. “On Disentangled Representations Learned from Correlated Data.” In <i>Proceedings of the 38th International Conference on Machine Learning</i>, 139:10401–12. ML Research Press, 2021.","apa":"Träuble, F., Creager, E., Kilbertus, N., Locatello, F., Dittadi, A., Goyal, A., … Bauer, S. (2021). On disentangled representations learned from correlated data. In <i>Proceedings of the 38th International Conference on Machine Learning</i> (Vol. 139, pp. 10401–10412). Virtual: ML Research Press.","ieee":"F. Träuble <i>et al.</i>, “On disentangled representations learned from correlated data,” in <i>Proceedings of the 38th International Conference on Machine Learning</i>, Virtual, 2021, vol. 139, pp. 10401–10412.","ista":"Träuble F, Creager E, Kilbertus N, Locatello F, Dittadi A, Goyal A, Schölkopf B, Bauer S. 2021. On disentangled representations learned from correlated data. Proceedings of the 38th International Conference on Machine Learning. ICML: International Conference on Machine Learning, PMLR, vol. 139, 10401–10412.","short":"F. Träuble, E. Creager, N. Kilbertus, F. Locatello, A. Dittadi, A. Goyal, B. Schölkopf, S. Bauer, in:, Proceedings of the 38th International Conference on Machine Learning, ML Research Press, 2021, pp. 10401–10412.","ama":"Träuble F, Creager E, Kilbertus N, et al. On disentangled representations learned from correlated data. In: <i>Proceedings of the 38th International Conference on Machine Learning</i>. Vol 139. ML Research Press; 2021:10401-10412."},"conference":{"location":"Virtual","start_date":"2021-07-18","end_date":"2021-07-24","name":"ICML: International Conference on Machine Learning"},"oa_version":"Published Version"},{"conference":{"name":"ICLR: International Conference on Learning Representations","start_date":"2021-05-03","location":"Virtual","end_date":"2021-05-07"},"date_created":"2023-08-22T14:04:16Z","oa_version":"Preprint","extern":"1","external_id":{"arxiv":["2010.14407"]},"date_published":"2021-05-04T00:00:00Z","month":"05","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","day":"04","department":[{"_id":"FrLo"}],"citation":{"ama":"Dittadi A, Träuble F, Locatello F, et al. On the transfer of disentangled representations in realistic settings. In: <i>The Ninth International Conference on Learning Representations</i>. ; 2021.","short":"A. Dittadi, F. Träuble, F. Locatello, M. Wüthrich, V. Agrawal, O. Winther, S. Bauer, B. Schölkopf, in:, The Ninth International Conference on Learning Representations, 2021.","ista":"Dittadi A, Träuble F, Locatello F, Wüthrich M, Agrawal V, Winther O, Bauer S, Schölkopf B. 2021. On the transfer of disentangled representations in realistic settings. The Ninth International Conference on Learning Representations. ICLR: International Conference on Learning Representations.","ieee":"A. Dittadi <i>et al.</i>, “On the transfer of disentangled representations in realistic settings,” in <i>The Ninth International Conference on Learning Representations</i>, Virtual, 2021.","apa":"Dittadi, A., Träuble, F., Locatello, F., Wüthrich, M., Agrawal, V., Winther, O., … Schölkopf, B. (2021). On the transfer of disentangled representations in realistic settings. In <i>The Ninth International Conference on Learning Representations</i>. Virtual.","chicago":"Dittadi, Andrea, Frederik Träuble, Francesco Locatello, Manuel Wüthrich, Vaibhav Agrawal, Ole Winther, Stefan Bauer, and Bernhard Schölkopf. “On the Transfer of Disentangled Representations in Realistic Settings.” In <i>The Ninth International Conference on Learning Representations</i>, 2021.","mla":"Dittadi, Andrea, et al. “On the Transfer of Disentangled Representations in Realistic Settings.” <i>The Ninth International Conference on Learning Representations</i>, 2021."},"type":"conference","language":[{"iso":"eng"}],"quality_controlled":"1","article_processing_charge":"No","arxiv":1,"main_file_link":[{"url":"https://arxiv.org/abs/2010.14407","open_access":"1"}],"year":"2021","abstract":[{"lang":"eng","text":"Learning meaningful representations that disentangle the underlying structure of the data generating process is considered to be of key importance in machine learning. While disentangled representations were found to be useful for diverse tasks such as abstract reasoning and fair classification, their scalability and real-world impact remain questionable. We introduce a new high-resolution dataset with 1M simulated images and over 1,800 annotated real-world images of the same setup. In contrast to previous work, this new dataset exhibits correlations, a complex underlying structure, and allows to evaluate transfer to unseen simulated and real-world settings where the encoder i) remains in distribution or ii) is out of distribution. We propose new architectures in order to scale disentangled representation learning to realistic high-resolution settings and conduct a large-scale empirical study of disentangled representations on this dataset. We observe that disentanglement is a good predictor for out-of-distribution (OOD) task performance."}],"_id":"14178","author":[{"last_name":"Dittadi","full_name":"Dittadi, Andrea","first_name":"Andrea"},{"full_name":"Träuble, Frederik","last_name":"Träuble","first_name":"Frederik"},{"orcid":"0000-0002-4850-0683","full_name":"Locatello, Francesco","last_name":"Locatello","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","first_name":"Francesco"},{"last_name":"Wüthrich","full_name":"Wüthrich, Manuel","first_name":"Manuel"},{"full_name":"Agrawal, Vaibhav","last_name":"Agrawal","first_name":"Vaibhav"},{"last_name":"Winther","full_name":"Winther, Ole","first_name":"Ole"},{"full_name":"Bauer, Stefan","last_name":"Bauer","first_name":"Stefan"},{"full_name":"Schölkopf, Bernhard","last_name":"Schölkopf","first_name":"Bernhard"}],"oa":1,"title":"On the transfer of disentangled representations in realistic settings","status":"public","publication_status":"published","date_updated":"2023-09-11T10:55:30Z","publication":"The Ninth International Conference on Learning Representations"},{"year":"2021","oa":1,"page":"16451-16467","publication":"Advances in Neural Information Processing Systems","date_updated":"2023-09-11T10:33:19Z","date_published":"2021-06-08T00:00:00Z","extern":"1","external_id":{"arxiv":["2106.04619"]},"date_created":"2023-08-22T14:04:36Z","department":[{"_id":"FrLo"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","quality_controlled":"1","type":"conference","language":[{"iso":"eng"}],"intvolume":"        34","main_file_link":[{"url":"https://arxiv.org/abs/2106.04619","open_access":"1"}],"abstract":[{"lang":"eng","text":"Self-supervised representation learning has shown remarkable success in a number of domains. A common practice is to perform data augmentation via hand-crafted transformations intended to leave the semantics of the data invariant. We seek to understand the empirical success of this approach from a theoretical perspective. We formulate the augmentation process as a latent variable model by postulating a partition of the latent representation into a content component, which is assumed invariant to augmentation, and a style component, which is allowed to change. Unlike prior work on disentanglement and independent component analysis, we allow for both nontrivial statistical and causal dependencies in the latent space. We study the identifiability of the latent representation based on pairs of views of the observations and prove sufficient conditions that allow us to identify the invariant content partition up to an invertible mapping in both generative and discriminative settings. We find numerical simulations with dependent latent variables are consistent with our theory. Lastly, we introduce Causal3DIdent, a dataset of high-dimensional, visually complex images with rich causal dependencies, which we use to study the effect of data augmentations performed in practice."}],"title":"Self-supervised learning with data augmentations provably isolates content from style","author":[{"first_name":"Julius von","last_name":"Kügelgen","full_name":"Kügelgen, Julius von"},{"full_name":"Sharma, Yash","last_name":"Sharma","first_name":"Yash"},{"first_name":"Luigi","full_name":"Gresele, Luigi","last_name":"Gresele"},{"first_name":"Wieland","last_name":"Brendel","full_name":"Brendel, Wieland"},{"first_name":"Bernhard","full_name":"Schölkopf, Bernhard","last_name":"Schölkopf"},{"first_name":"Michel","full_name":"Besserve, Michel","last_name":"Besserve"},{"orcid":"0000-0002-4850-0683","last_name":"Locatello","full_name":"Locatello, Francesco","first_name":"Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4"}],"_id":"14179","status":"public","publication_status":"published","publication_identifier":{"isbn":["9781713845393"]},"oa_version":"Preprint","conference":{"name":"NeurIPS: Neural Information Processing Systems","end_date":"2021-12-10","start_date":"2021-12-07","location":"Virtual"},"citation":{"short":"J. von Kügelgen, Y. Sharma, L. Gresele, W. Brendel, B. Schölkopf, M. Besserve, F. Locatello, in:, Advances in Neural Information Processing Systems, 2021, pp. 16451–16467.","ista":"Kügelgen J von, Sharma Y, Gresele L, Brendel W, Schölkopf B, Besserve M, Locatello F. 2021. Self-supervised learning with data augmentations provably isolates content from style. Advances in Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 34, 16451–16467.","ama":"Kügelgen J von, Sharma Y, Gresele L, et al. Self-supervised learning with data augmentations provably isolates content from style. In: <i>Advances in Neural Information Processing Systems</i>. Vol 34. ; 2021:16451-16467.","ieee":"J. von Kügelgen <i>et al.</i>, “Self-supervised learning with data augmentations provably isolates content from style,” in <i>Advances in Neural Information Processing Systems</i>, Virtual, 2021, vol. 34, pp. 16451–16467.","apa":"Kügelgen, J. von, Sharma, Y., Gresele, L., Brendel, W., Schölkopf, B., Besserve, M., &#38; Locatello, F. (2021). Self-supervised learning with data augmentations provably isolates content from style. In <i>Advances in Neural Information Processing Systems</i> (Vol. 34, pp. 16451–16467). Virtual.","mla":"Kügelgen, Julius von, et al. “Self-Supervised Learning with Data Augmentations Provably Isolates Content from Style.” <i>Advances in Neural Information Processing Systems</i>, vol. 34, 2021, pp. 16451–67.","chicago":"Kügelgen, Julius von, Yash Sharma, Luigi Gresele, Wieland Brendel, Bernhard Schölkopf, Michel Besserve, and Francesco Locatello. “Self-Supervised Learning with Data Augmentations Provably Isolates Content from Style.” In <i>Advances in Neural Information Processing Systems</i>, 34:16451–67, 2021."},"day":"08","month":"06","article_processing_charge":"No","volume":34,"arxiv":1},{"oa":1,"year":"2021","date_updated":"2023-09-11T11:33:46Z","publication":"Advances in Neural Information Processing Systems","page":"10985-10998","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","department":[{"_id":"FrLo"}],"date_created":"2023-08-22T14:04:55Z","external_id":{"arxiv":["2110.06399"]},"date_published":"2021-10-12T00:00:00Z","extern":"1","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2110.06399"}],"intvolume":"        34","language":[{"iso":"eng"}],"quality_controlled":"1","type":"conference","_id":"14180","title":"Dynamic inference with neural interpreters","author":[{"last_name":"Rahaman","full_name":"Rahaman, Nasim","first_name":"Nasim"},{"first_name":"Muhammad Waleed","last_name":"Gondal","full_name":"Gondal, Muhammad Waleed"},{"full_name":"Joshi, Shruti","last_name":"Joshi","first_name":"Shruti"},{"first_name":"Peter","full_name":"Gehler, Peter","last_name":"Gehler"},{"first_name":"Yoshua","full_name":"Bengio, Yoshua","last_name":"Bengio"},{"orcid":"0000-0002-4850-0683","full_name":"Locatello, Francesco","last_name":"Locatello","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","first_name":"Francesco"},{"full_name":"Schölkopf, Bernhard","last_name":"Schölkopf","first_name":"Bernhard"}],"abstract":[{"text":"Modern neural network architectures can leverage large amounts of data to generalize well within the training distribution. However, they are less capable of systematic generalization to data drawn from unseen but related distributions, a feat that is hypothesized to require compositional reasoning and reuse of knowledge. In this work, we present Neural Interpreters, an architecture that factorizes inference in a self-attention network as a system of modules, which we call \\emph{functions}. Inputs to the model are routed through a sequence of functions in a way that is end-to-end learned. The proposed architecture can flexibly compose computation along width and depth, and lends itself well to capacity extension after training. To demonstrate the versatility of Neural Interpreters, we evaluate it in two distinct settings: image classification and visual abstract reasoning on Raven Progressive Matrices. In the former, we show that Neural Interpreters perform on par with the vision transformer using fewer parameters, while being transferrable to a new task in a sample efficient manner. In the latter, we find that Neural Interpreters are competitive with respect to the state-of-the-art in terms of systematic generalization. ","lang":"eng"}],"publication_identifier":{"isbn":["9781713845393"]},"status":"public","publication_status":"published","month":"10","citation":{"mla":"Rahaman, Nasim, et al. “Dynamic Inference with Neural Interpreters.” <i>Advances in Neural Information Processing Systems</i>, vol. 34, 2021, pp. 10985–98.","chicago":"Rahaman, Nasim, Muhammad Waleed Gondal, Shruti Joshi, Peter Gehler, Yoshua Bengio, Francesco Locatello, and Bernhard Schölkopf. “Dynamic Inference with Neural Interpreters.” In <i>Advances in Neural Information Processing Systems</i>, 34:10985–98, 2021.","short":"N. Rahaman, M.W. Gondal, S. Joshi, P. Gehler, Y. Bengio, F. Locatello, B. Schölkopf, in:, Advances in Neural Information Processing Systems, 2021, pp. 10985–10998.","ama":"Rahaman N, Gondal MW, Joshi S, et al. Dynamic inference with neural interpreters. In: <i>Advances in Neural Information Processing Systems</i>. Vol 34. ; 2021:10985-10998.","ista":"Rahaman N, Gondal MW, Joshi S, Gehler P, Bengio Y, Locatello F, Schölkopf B. 2021. Dynamic inference with neural interpreters. Advances in Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 34, 10985–10998.","apa":"Rahaman, N., Gondal, M. W., Joshi, S., Gehler, P., Bengio, Y., Locatello, F., &#38; Schölkopf, B. (2021). Dynamic inference with neural interpreters. In <i>Advances in Neural Information Processing Systems</i> (Vol. 34, pp. 10985–10998). Virtual.","ieee":"N. Rahaman <i>et al.</i>, “Dynamic inference with neural interpreters,” in <i>Advances in Neural Information Processing Systems</i>, Virtual, 2021, vol. 34, pp. 10985–10998."},"day":"12","oa_version":"Preprint","conference":{"name":"NeurIPS: Neural Information Processing Systems","end_date":"2021-12-10","location":"Virtual","start_date":"2021-12-07"},"arxiv":1,"article_processing_charge":"No","volume":34},{"page":"2337-2343","date_updated":"2023-09-11T11:14:30Z","publication":"Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence","year":"2021","oa":1,"type":"conference","quality_controlled":"1","language":[{"iso":"eng"}],"publisher":"International Joint Conferences on Artificial Intelligence","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2105.09240"}],"date_created":"2023-08-22T14:05:14Z","date_published":"2021-05-19T00:00:00Z","external_id":{"arxiv":["2105.09240"]},"extern":"1","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","department":[{"_id":"FrLo"}],"status":"public","publication_status":"published","publication_identifier":{"eisbn":["9780999241196"]},"doi":"10.24963/ijcai.2021/322","abstract":[{"text":"Variational Inference makes a trade-off between the capacity of the variational family and the tractability of finding an approximate posterior distribution. Instead, Boosting Variational Inference allows practitioners to obtain increasingly good posterior approximations by spending more compute. The main obstacle to widespread adoption of Boosting Variational Inference is the amount of resources necessary to improve over a strong Variational Inference baseline. In our work, we trace this limitation back to the global curvature of the KL-divergence. We characterize how the global curvature impacts time and memory consumption, address the problem with the notion of local curvature, and provide a novel approximate backtracking algorithm for estimating local curvature. We give new theoretical convergence rates for our algorithms and provide experimental validation on synthetic and real-world datasets.","lang":"eng"}],"_id":"14181","title":"Boosting variational inference with locally adaptive step-sizes","author":[{"first_name":"Gideon","full_name":"Dresdner, Gideon","last_name":"Dresdner"},{"last_name":"Shekhar","full_name":"Shekhar, Saurav","first_name":"Saurav"},{"first_name":"Fabian","last_name":"Pedregosa","full_name":"Pedregosa, Fabian"},{"id":"26cfd52f-2483-11ee-8040-88983bcc06d4","first_name":"Francesco","full_name":"Locatello, Francesco","last_name":"Locatello","orcid":"0000-0002-4850-0683"},{"first_name":"Gunnar","full_name":"Rätsch, Gunnar","last_name":"Rätsch"}],"article_processing_charge":"No","arxiv":1,"oa_version":"Published Version","conference":{"start_date":"2021-08-19","location":"Montreal, Canada","end_date":"2021-08-27","name":"IJCAI: International Joint Conference on Artificial Intelligence"},"month":"05","citation":{"chicago":"Dresdner, Gideon, Saurav Shekhar, Fabian Pedregosa, Francesco Locatello, and Gunnar Rätsch. “Boosting Variational Inference with Locally Adaptive Step-Sizes.” In <i>Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence</i>, 2337–43. International Joint Conferences on Artificial Intelligence, 2021. <a href=\"https://doi.org/10.24963/ijcai.2021/322\">https://doi.org/10.24963/ijcai.2021/322</a>.","mla":"Dresdner, Gideon, et al. “Boosting Variational Inference with Locally Adaptive Step-Sizes.” <i>Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence</i>, International Joint Conferences on Artificial Intelligence, 2021, pp. 2337–43, doi:<a href=\"https://doi.org/10.24963/ijcai.2021/322\">10.24963/ijcai.2021/322</a>.","ista":"Dresdner G, Shekhar S, Pedregosa F, Locatello F, Rätsch G. 2021. Boosting variational inference with locally adaptive step-sizes. Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence. IJCAI: International Joint Conference on Artificial Intelligence, 2337–2343.","short":"G. Dresdner, S. Shekhar, F. Pedregosa, F. Locatello, G. Rätsch, in:, Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence, International Joint Conferences on Artificial Intelligence, 2021, pp. 2337–2343.","ama":"Dresdner G, Shekhar S, Pedregosa F, Locatello F, Rätsch G. Boosting variational inference with locally adaptive step-sizes. In: <i>Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence</i>. International Joint Conferences on Artificial Intelligence; 2021:2337-2343. doi:<a href=\"https://doi.org/10.24963/ijcai.2021/322\">10.24963/ijcai.2021/322</a>","apa":"Dresdner, G., Shekhar, S., Pedregosa, F., Locatello, F., &#38; Rätsch, G. (2021). Boosting variational inference with locally adaptive step-sizes. In <i>Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence</i> (pp. 2337–2343). Montreal, Canada: International Joint Conferences on Artificial Intelligence. <a href=\"https://doi.org/10.24963/ijcai.2021/322\">https://doi.org/10.24963/ijcai.2021/322</a>","ieee":"G. Dresdner, S. Shekhar, F. Pedregosa, F. Locatello, and G. Rätsch, “Boosting variational inference with locally adaptive step-sizes,” in <i>Proceedings of the Thirtieth International Joint Conference on Artificial Intelligence</i>, Montreal, Canada, 2021, pp. 2337–2343."},"day":"19"},{"quality_controlled":"1","language":[{"iso":"eng"}],"type":"conference","main_file_link":[{"open_access":"1","url":"https://arxiv.org/abs/2107.01057"}],"intvolume":"        34","external_id":{"arxiv":["2107.01057"]},"date_published":"2021-07-02T00:00:00Z","extern":"1","date_created":"2023-08-22T14:05:41Z","department":[{"_id":"FrLo"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","page":"116-128","publication":"35th Conference on Neural Information Processing Systems","date_updated":"2023-09-11T11:31:59Z","year":"2021","oa":1,"volume":34,"article_processing_charge":"No","arxiv":1,"conference":{"name":"NeurIPS: Neural Information Processing Systems","end_date":"2021-12-10","location":"Virtual","start_date":"2021-12-07"},"oa_version":"Preprint","day":"02","citation":{"ieee":"F. Träuble, J. von Kügelgen, M. Kleindessner, F. Locatello, B. Schölkopf, and P. Gehler, “Backward-compatible prediction updates: A probabilistic approach,” in <i>35th Conference on Neural Information Processing Systems</i>, Virtual, 2021, vol. 34, pp. 116–128.","apa":"Träuble, F., Kügelgen, J. von, Kleindessner, M., Locatello, F., Schölkopf, B., &#38; Gehler, P. (2021). Backward-compatible prediction updates: A probabilistic approach. In <i>35th Conference on Neural Information Processing Systems</i> (Vol. 34, pp. 116–128). Virtual.","ista":"Träuble F, Kügelgen J von, Kleindessner M, Locatello F, Schölkopf B, Gehler P. 2021. Backward-compatible prediction updates: A probabilistic approach. 35th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 34, 116–128.","ama":"Träuble F, Kügelgen J von, Kleindessner M, Locatello F, Schölkopf B, Gehler P. Backward-compatible prediction updates: A probabilistic approach. In: <i>35th Conference on Neural Information Processing Systems</i>. Vol 34. ; 2021:116-128.","short":"F. Träuble, J. von Kügelgen, M. Kleindessner, F. Locatello, B. Schölkopf, P. Gehler, in:, 35th Conference on Neural Information Processing Systems, 2021, pp. 116–128.","chicago":"Träuble, Frederik, Julius von Kügelgen, Matthäus Kleindessner, Francesco Locatello, Bernhard Schölkopf, and Peter Gehler. “Backward-Compatible Prediction Updates: A Probabilistic Approach.” In <i>35th Conference on Neural Information Processing Systems</i>, 34:116–28, 2021.","mla":"Träuble, Frederik, et al. “Backward-Compatible Prediction Updates: A Probabilistic Approach.” <i>35th Conference on Neural Information Processing Systems</i>, vol. 34, 2021, pp. 116–28."},"month":"07","status":"public","publication_status":"published","publication_identifier":{"isbn":["9781713845393"]},"abstract":[{"lang":"eng","text":"When machine learning systems meet real world applications, accuracy is only\r\none of several requirements. In this paper, we assay a complementary\r\nperspective originating from the increasing availability of pre-trained and\r\nregularly improving state-of-the-art models. While new improved models develop\r\nat a fast pace, downstream tasks vary more slowly or stay constant. Assume that\r\nwe have a large unlabelled data set for which we want to maintain accurate\r\npredictions. Whenever a new and presumably better ML models becomes available,\r\nwe encounter two problems: (i) given a limited budget, which data points should\r\nbe re-evaluated using the new model?; and (ii) if the new predictions differ\r\nfrom the current ones, should we update? Problem (i) is about compute cost,\r\nwhich matters for very large data sets and models. Problem (ii) is about\r\nmaintaining consistency of the predictions, which can be highly relevant for\r\ndownstream applications; our demand is to avoid negative flips, i.e., changing\r\ncorrect to incorrect predictions. In this paper, we formalize the Prediction\r\nUpdate Problem and present an efficient probabilistic approach as answer to the\r\nabove questions. In extensive experiments on standard classification benchmark\r\ndata sets, we show that our method outperforms alternative strategies along key\r\nmetrics for backward-compatible prediction updates."}],"author":[{"last_name":"Träuble","full_name":"Träuble, Frederik","first_name":"Frederik"},{"first_name":"Julius von","full_name":"Kügelgen, Julius von","last_name":"Kügelgen"},{"first_name":"Matthäus","full_name":"Kleindessner, Matthäus","last_name":"Kleindessner"},{"first_name":"Francesco","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","last_name":"Locatello","full_name":"Locatello, Francesco","orcid":"0000-0002-4850-0683"},{"first_name":"Bernhard","last_name":"Schölkopf","full_name":"Schölkopf, Bernhard"},{"full_name":"Gehler, Peter","last_name":"Gehler","first_name":"Peter"}],"title":"Backward-compatible prediction updates: A probabilistic approach","_id":"14182"}]
