@inproceedings{9f1d28b9e159442c94c9f61a7fd1b7d5,
  title     = {Spiking Neural Network Accelerator Architecture for Differential-Time Representation using Learned Encoding},
  abstract  = {Spiking Neural Networks (SNNs) have garnered attention over recent years due to their increased energy efficiency and advantages in terms of operational complexity compared to traditional Artificial Neural Networks (ANNs). Two important questions when implementing SNNs are how to best encode existing data into spike trains and how to efficiently process these spike trains in hardware. This paper addresses both of these problems by incorporating the encoding into the learning process, thus allowing the network to learn the spike encoding alongside the weights. Furthermore, this paper proposes a hardware architecture based on a recently introduced differential-time representation for spike trains allowing decoupling of spike time and processing time. Together these contributions lead to a feedforward SNN using only Leaky-Integrate and Fire (LIF) neurons that surpasses 99\% accuracy on the MNIST dataset while still being implementable on medium-sized FPGAs with inference times of less than 295$\mu$s.},
  keywords  = {Accuracy, Neurons, Accelerator architectures, Spiking neural networks, Encoding, Hardware, Energy efficiency, Complexity theory, Feedforward systems, Field programmable gate arrays},
  author    = {Windhager, Daniel and Ratschbacher, Lothar and Moser, Bernhard A. and Lunglmayr, Michael},
  year      = {2025},
  month     = jun,
  day       = {27},
  doi       = {10.1109/ISCAS56072.2025.11043954},
  language  = {English},
  isbn      = {979-8-3503-5684-7},
  series    = {Proceedings - IEEE International Symposium on Circuits and Systems},
  publisher = {IEEE},
  pages     = {1--5},
  booktitle = {2025 IEEE International Symposium on Circuits and Systems (ISCAS)},
  edition   = {1},
  note      = {2025 IEEE International Symposium on Circuits and Systems (ISCAS) ; Conference date: 25-05-2025 Through 28-05-2025},
}