@comment{fedLearning.bib}
@inproceedings{bellSecureSingleServerAggregation2020,
  title     = {Secure Single-Server Aggregation with ({Poly})Logarithmic Overhead},
  booktitle = {Proceedings of the 2020 {ACM} {SIGSAC} Conference on Computer and Communications Security},
  author    = {Bell, James Henry and Bonawitz, Kallista A. and Gascón, Adrià and Lepoint, Tancrède and Raykova, Mariana},
  date      = {2020-11-02},
  series    = {{CCS} '20},
  pages     = {1253--1269},
  publisher = {Association for Computing Machinery},
  location  = {New York, NY, USA},
  doi       = {10.1145/3372297.3417885},
  url       = {https://dl.acm.org/doi/10.1145/3372297.3417885},
  urldate   = {2023-10-30},
  abstract  = {Secure aggregation is a cryptographic primitive that enables a server to learn the sum of the vector inputs of many clients. Bonawitz et al. (CCS 2017) presented a construction that incurs computation and communication for each client linear in the number of parties. While this functionality enables a broad range of privacy preserving computational tasks, scaling concerns limit its scope of use. We present the first constructions for secure aggregation that achieve polylogarithmic communication and computation per client. Our constructions provide security in the semi-honest and the semi-malicious settings where the adversary controls the server and a δ-fraction of the clients, and correctness with up to δ-fraction dropouts among the clients. Our constructions show how to replace the complete communication graph of Bonawitz et al., which entails the linear overheads, with a k-regular graph of logarithmic degree while maintaining the security guarantees. Beyond improving the known asymptotics for secure aggregation, our constructions also achieve very efficient concrete parameters. The semi-honest secure aggregation can handle a billion clients at the per-client cost of the protocol of Bonawitz et al. for a thousand clients. In the semi-malicious setting with {$10^4$} clients, each client needs to communicate only with 3\% of the clients to have a guarantee that its input has been added together with the inputs of at least 5000 other clients, while withstanding up to 5\% corrupt clients and 5\% dropouts. We also show an application of secure aggregation to the task of secure shuffling which enables the first cryptographically secure instantiation of the shuffle model of differential privacy.},
  isbn      = {978-1-4503-7089-9},
  keywords  = {multi-party computation,secure aggregation,secure shuffling},
  file      = {/home/fee/Zotero/storage/JF5DXTMU/Bell et al. - 2020 - Secure Single-Server Aggregation with (Poly)Logari.pdf}
}

@article{graserRoleSpatialData2022,
  title         = {On the Role of Spatial Data Science for Federated Learning},
  author        = {Graser, Anita and Heistracher, Clemens and Pruckovskaja, Viktorija},
  date          = {2022-09-09},
  doi           = {10.25436/E24K5T},
  url           = {https://escholarship.org/uc/item/7mg5655h},
  urldate       = {2023-10-27},
  abstract      = {Federated learning (FL) has the potential to mitigate privacy risks and communication costs associated with classical machine learning and data science approaches. Given the distributed nature of FL, many of its use cases face challenges related to spatiotemporal data, geographical analysis, and spatial statistics. However, so far, FL has received little attention by the GIScience community. In this paper, we provide a first overview of the key challenges in FL and how they relate to spatial data science. This paper thus aims to provide the basis for future contributions to federated learning practices by the (geo)spatial research community.},
  langid        = {english},
  internal-note = {NOTE(review): @article with no journaltitle — Biber will warn; verify the venue via the DOI and add journaltitle (or retype as @online) — TODO confirm},
  file          = {/home/fee/Zotero/storage/DHBGZN33/Graser et al. - 2022 - On the Role of Spatial Data Science for Federated .pdf}
}

@online{mathurOndeviceFederatedLearning2021,
  title       = {On-Device Federated Learning with {Flower}},
  author      = {Mathur, Akhil and Beutel, Daniel J. and de Gusmão, Pedro Porto Buarque and Fernandez-Marques, Javier and Topal, Taner and Qiu, Xinchi and Parcollet, Titouan and Gao, Yan and Lane, Nicholas D.},
  date        = {2021-04-07},
  eprint      = {2104.03042},
  eprinttype  = {arXiv},
  eprintclass = {cs},
  doi         = {10.48550/arXiv.2104.03042},
  url         = {http://arxiv.org/abs/2104.03042},
  urldate     = {2023-10-30},
  abstract    = {Federated Learning (FL) allows edge devices to collaboratively learn a shared prediction model while keeping their training data on the device, thereby decoupling the ability to do machine learning from the need to store data in the cloud. Despite the algorithmic advancements in FL, the support for on-device training of FL algorithms on edge devices remains poor. In this paper, we present an exploration of on-device FL on various smartphones and embedded devices using the Flower framework. We also evaluate the system costs of on-device FL and discuss how this quantification could be used to design more efficient FL algorithms.},
  pubstate    = {preprint},
  keywords    = {Computer Science - Artificial Intelligence,{Computer Science - Distributed, Parallel, and Cluster Computing},Computer Science - Machine Learning,I.0},
  file        = {/home/fee/Zotero/storage/VWSWJ6F6/Mathur et al. - 2021 - On-device Federated Learning with Flower.pdf;/home/fee/Zotero/storage/JE7MAB26/2104.html}
}