@inproceedings{d1c70f2fa1e746cc98263d5006af7b39,
  title     = {Privacy-Preserving {LLM} Agent for Multi-modal Health Monitoring},
  author    = {Xie, Qipeng and Wu, Jiafei and Wang, Weiyu and Lian, Zhuotao and Yuan, Mu and Shuai, Xian and Wang, Weizheng and Yuan, Haoyi and Hu, Haibo and Wu, Kaishun},
  editor    = {Yang, Guomin and Liu, Shengli and Su, Chunhua and Otsuka, Akira and Lian, Zhuotao},
  booktitle = {Provable and Practical Security - 19th International Conference, ProvSec 2025, Proceedings},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  address   = {Germany},
  pages     = {488--492},
  year      = {2025},
  month     = nov,
  doi       = {10.1007/978-981-95-2961-2_27},
  isbn      = {9789819529605},
  language  = {English},
  keywords  = {Health, Homomorphic encryption, Large language models (LLM), Privacy},
  abstract  = {Tool-using LLM agents for health monitoring raise critical privacy concerns as they share sensitive patient data with cloud providers and third-party models. This study presents HealthAgent, a privacy-preserving LLM agent framework that protects both user queries and multi-modal sensor data through homomorphic encryption. HealthAgent enables an LLM orchestrator to coordinate specialized AI models for complex health assessments while processing all data in encrypted form. The system achieves 95\% task decomposition accuracy with 10 s latency, demonstrating that strong privacy guarantees can be maintained without sacrificing real-time performance in health monitoring applications.},
  note      = {Publisher Copyright: {\textcopyright} The Author(s), under exclusive license to Springer Nature Singapore Pte Ltd. 2026.; 19th International Conference on Provable and Practical Security, ProvSec 2025; Conference date: 10-10-2025 Through 12-10-2025},
}