@inproceedings{39cef8e5771c466984fa2b882ac09226,
title = "Towards Fairer and More Efficient Federated Learning via Multidimensional Personalized Edge Models",
abstract = "Federated learning (FL) is an emerging technique that trains massive and geographically distributed edge data while maintaining privacy. However, FL has inherent challenges in terms of fairness and computational efficiency due to the rising heterogeneity of edges, and thus usually results in sub-optimal performance in recent state-of-the-art (SOTA) solutions. In this paper, we propose a Customized Federated Learning (CFL) system to eliminate FL heterogeneity from multiple dimensions. Specifically, CFL tailors personalized models from the specially designed global model for each client jointly guided by an online trained model-search helper and a novel aggregation algorithm. Extensive experiments demonstrate that CFL has full-stack advantages for both FL training and edge reasoning and significantly improves the SOTA performance w.r.t. model accuracy (up to 7.2% in the non-heterogeneous environment and up to 21.8% in the heterogeneous environment), efficiency, and FL fairness.",
keywords = "Deep Learning, Edge Computing, Federated Learning, Model Compression, Neural Architecture Search",
author = "Yingchun Wang and Jingcai Guo and Jie Zhang and Song Guo and Zhang, {W. J.} and Qinghua Zheng",
note = "Publisher Copyright: {\textcopyright} 2023 IEEE.",
year = "2023",
month = aug,
doi = "10.1109/IJCNN54540.2023.10191956",
language = "English",
series = "Proceedings of the International Joint Conference on Neural Networks",
pages = "1--8",
booktitle = "IJCNN 2023 - International Joint Conference on Neural Networks, Proceedings",
}