@inproceedings{70c408a78e524fa8914da6d2e4aa640d,
title = "Accelerating Data Loading in Deep Neural Network Training",
abstract = "Data loading can dominate deep neural network training time on large-scale systems. We present a comprehensive study on accelerating data loading performance in large-scale distributed training. We first identify performance and scalability issues in current data loading implementations. We then propose optimizations that utilize CPU resources to the data loader design. We use an analytical model to characterize the impact of data loading on the overall training time and establish the performance trend as we scale up distributed training. Our model suggests that I/O rate limits the scalability of distributed training, which inspires us to design a locality-aware data loading method. By utilizing software caches, our method can drastically reduce the data loading communication volume in comparison with the original data loading implementation. Finally, we evaluate the proposed optimizations with various experiments. We achieved more than 30x speedup in data loading using 256 nodes with 1,024 learners.",
keywords = "data loading, data locality, distributed training, machine learning, scalability",
author = "Yang, Chih Chieh and Cong, Guojing",
note = "Publisher Copyright: {\textcopyright} 2019 IEEE.; 26th Annual IEEE International Conference on High Performance Computing, HiPC 2019 ; Conference date: 17-12-2019 Through 20-12-2019",
year = "2019",
month = dec,
doi = "10.1109/HiPC.2019.00037",
language = "English",
series = "Proceedings - 26th IEEE International Conference on High Performance Computing, HiPC 2019",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "235--245",
booktitle = "Proceedings - 26th IEEE International Conference on High Performance Computing, HiPC 2019",
}