@inproceedings{4b11605de53944a7a4f8b1eb02556929,
title = "Robust Generation of Channel Distributions with Diffusion Models",
abstract = "Training neural encoders requires a differentiable channel model for backpropagation. This requirement can be bypassed by approximating the channel distribution from pilot signals, commonly with generative adversarial networks (GANs). In this paper, we introduce diffusion models (DMs) for channel generation and propose an efficient training algorithm. Our DMs achieve near-optimal end-to-end symbol error rates (SERs) and, importantly, outperform GANs in the high signal-to-noise ratio regime. We further explore the trade-off between sample quality and sampling speed, and show that a suitable noise schedule can significantly reduce sampling time with only a minor increase in SER.",
keywords = "Channel generation, diffusion model, end-to-end learning, generative networks",
author = "Kim, Muah and Fritschek, Rick and Schaefer, {Rafael F.}",
note = "Publisher Copyright: {\textcopyright} 2024 IEEE; 59th Annual IEEE International Conference on Communications, ICC 2024; Conference date: 09-06-2024 through 13-06-2024",
year = "2024",
doi = "10.1109/ICC51166.2024.10622251",
language = "English",
series = "IEEE International Conference on Communications",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "330--335",
editor = "Matthew Valenti and David Reed and Melissa Torres",
booktitle = "ICC 2024 - IEEE International Conference on Communications",
}