publ-2021.bib
@comment{{This file has been generated by bib2bib 1.99}}
@comment{{Command line: bib2bib -q -ob publ-2021.bib --remove keywords -c 'year = 2021' /home/jantsch/Website/jantsch.se/AxelJantsch/publist.bib}}
@comment{{This file has been generated by bib2bib 1.99}}
@comment{{Command line: bib2bib -q -oc /home/jantsch/Website/jantsch.se/AxelJantsch/citefile -ob /home/jantsch/Website/jantsch.se/AxelJantsch/publist.bib -c '(( author : "Jantsch" or ( editor : "Jantsch" and $type : "book" ))
and ( not ( $key : "presentation" ))
and ( not ( $type : "techreport" ))
and ( not ( $type : "misc" ))
and ( not ( annotate : "not reviewed" )))
or $key = "hauer:2021a"
' /home/jantsch/text/papers/lit.bib}}
@comment{{Example entry for online references (entry type misc, key WinNT):
  title = {MS Windows NT Kernel Description},
  howpublished = {\url{http://web.archive.org/web/20080207010024/http://www.808multimedia.com/winnt/kernel.htm}},
  note = {Accessed: 2010-09-30}
}}
@comment{{Example entry for a Wikipedia reference (entry type misc, key WikiQuineMcCluskey):
  title = {Quine-McCluskey Algorithm},
  author = {Wikipedia},
  year = 2021,
  howpublished = {\url{https://en.wikipedia.org/wiki/Quine%E2%80%93McCluskey_algorithm}},
  note = {Accessed: 2021-08-11}
}}
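@comment{{Usage sketch (not generated by bib2bib; a minimal, hypothetical LaTeX example):
assuming this file is available as publ-2021.bib next to the main .tex source,
its entries can be cited by their keys, for instance:

\documentclass{article}
\begin{document}
Latency estimation for embedded CNNs is covered in \cite{lechner:2021a},
and stacked execution-time models in \cite{wess:2021a}.
\bibliographystyle{plain}
\bibliography{publ-2021}
\end{document}
}}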
@inproceedings{elderhalli:2021a,
author = {Yassmeen Elderhalli and Nahla El-Araby and Osman
Hasan and Axel Jantsch and Sofiene Tahar },
title = {Dynamic Fault Tree Models for {FPGA} Fault Tolerance and Reliability },
booktitle = { Proceedings of the IEEE Computer Society Annual
Symposium on VLSI (ISVLSI) },
year = 2021,
month = {July},
address = {Tampa, Florida, USA},
url = {http://jantsch.se/AxelJantsch/papers/2021/NahlaElAraby-ISVLSI.pdf}
}
@inproceedings{hauer:2021b,
author = {Daniel Hauer and Maximilian G\"{o}tzinger and Axel
Jantsch and Florian Kintzler },
title = {Context Aware Monitoring for Smart Grids },
key = {selfaware,monitoring},
booktitle = {Proceedings of the International Symposium on
Industrial Electronics (ISIE) },
year = 2021,
month = {June},
address = {Kyoto, Japan},
url = {http://jantsch.se/AxelJantsch/papers/2021/DanielHauer-ISIE.pdf}
}
@article{mozelli:2021a,
author = {Amid Mozelli and Nima Taherinejad and Axel Jantsch},
title = {A Study on Confidence: An Unsupervised Multi-Agent
Machine Learning Experiment },
journal = {IEEE Design \& Test of Computers },
year = 2021,
key = {eml,cdl,ict,selfaware},
issn = { 2168-2356 },
doi = { 10.1109/MDAT.2021.3078341 }
}
@article{leal:2021a,
author = {Isaac S\'{a}nchez Leal and Irida Shallari and Silvia
Krug and Axel Jantsch and Mattias O'Nils },
title = { Impact of Input Data on Intelligence Partitioning
Decisions for {IoT} Smart Camera Nodes },
journal = { Electronics },
year = 2021,
volume = 10,
number = 16,
key = {eml},
annote = { SCIE indexed },
issn = { 2079-9292 },
doi = {10.3390/electronics10161898},
url = {http://jantsch.se/AxelJantsch/papers/2021/IsaacLeal-MDPIElectronics.pdf}
}
@article{shallari:2021a,
author = {Shallari, Irida and S\'{a}nchez Leal, Isaac and Krug,
Silvia and Jantsch, Axel and O'Nils, Mattias},
journal = {IEEE Access},
title = {{Design space exploration on IoT node: Trade-offs in
processing and communication}},
year = 2021,
key = {eml},
doi = {10.1109/ACCESS.2021.3074875},
url = {http://jantsch.se/AxelJantsch/papers/2021/IridaShallari-IEEEAccess.pdf},
issn = {2169-3536}
}
@inproceedings{haas:2021a,
author = { Bernhard Haas and Alexander Wendt and Axel Jantsch
and Matthias Wess },
title = {Neural Network Compression Through Shunt Connections
and Knowledge Distillation for Semantic Segmentation
Problems },
key = {eml,cdl,ict},
booktitle = {17th International Conference on Artificial
Intelligence Applications and Innovations (AIAI)},
month = {June},
doi = {10.1007/978-3-030-79150-6},
year = 2021,
url = {https://link.springer.com/chapter/10.1007/978-3-030-79150-6_28}
}
@incollection{donyanavard:2021a,
author = {Bryan Donyanavard and Amir M. Rahmani and Axel
Jantsch and Onur Mutlu and Nikil Dutt },
title = {Intelligent Management of Mobile Systems Through
Computational Self-Awareness },
booktitle = {Handbook of Research on Methodologies and
Applications of Supercomputing },
key = {selfaware},
publisher = { IGI Global },
year = 2021,
editor = {Veljko Milutinovi\'{c} and Milo\v{s} Kotlar},
pages = {41--73},
month = {February},
isbn = { 9781799871569 },
doi = { 10.4018/978-1-7998-7156-9 },
url = {http://arxiv.org/abs/2008.00095}
}
@misc{hauer:2021a,
title = {{MELODI}: A Mass E-Learning System for Design, Test,
and Prototyping of Digital Hardware },
author = { Daniel Hauer and Friedrich Bauer and Felix Braun and Axel
Jantsch and Markus D. Kobelrausch and Martin Mosbeck
and Nima TaheriNejad and Philipp-Sebastian Vogt},
howpublished = {DATE 2021 University Booth Tool Demonstration},
month = {February},
year = 2021,
key = {VELS},
note = {Best University Booth Award},
url = {http://jantsch.se/AxelJantsch/papers/2021/DanielHauer-DATEUniversityBooth.pdf}
}
@article{lechner:2021a,
author = {Martin Lechner and Axel Jantsch},
title = {Blackthorn: Latency Estimation Framework for {CNNs}
on Embedded {Nvidia} Platforms},
journal = {IEEE Access},
year = 2021,
key = {eml,cdl,ict},
doi = {10.1109/ACCESS.2021.3101936},
url = {http://jantsch.se/AxelJantsch/papers/2021/MartinLechner-IEEEAccess.pdf}
}
@article{wess:2021a,
author = {Matthias Wess and Marco Ivanov and Christian Unger and
Anvesh Nookala and Alexander Wendt and Axel Jantsch},
journal = {IEEE Access},
title = {{ANNETTE}: Accurate Neural Network Execution Time
Estimation With Stacked Models},
year = 2021,
volume = 9,
pages = {3545--3556},
key = {eml,cdl,ict},
abstract = {With new accelerator hardware for Deep Neural
Networks (DNNs), the computing power for Artificial
Intelligence (AI) applications has increased
rapidly. However, as DNN algorithms become more
complex and optimized for specific applications,
latency requirements remain challenging, and it is
critical to find the optimal points in the design
space. To decouple the architectural search from the
target hardware, we propose a time estimation
framework that allows for modeling the inference
latency of DNNs on hardware accelerators based on
mapping and layer-wise estimation models. The
proposed methodology extracts a set of models from
micro-kernel and multi-layer benchmarks and
generates a stacked model for mapping and network
execution time estimation. We compare estimation
accuracy and fidelity of the generated mixed models,
statistical models with the roofline model, and a
refined roofline model for evaluation. We test the
mixed models on the ZCU102 SoC board with Xilinx
Deep Neural Network Development Kit (DNNDK) and
Intel Neural Compute Stick 2 (NCS2) on a set of 12
state-of-the-art neural networks. It shows an
average estimation error of 3.47\% for the DNNDK and
7.44\% for the NCS2, outperforming the statistical
and analytical layer models for almost all selected
networks. For a randomly selected subset of 34
networks of the NASBench dataset, the mixed model
reaches fidelity of 0.988 in Spearman's $\rho$ rank
correlation coefficient metric.},
doi = {10.1109/ACCESS.2020.3047259},
url = {http://jantsch.se/AxelJantsch/papers/2021/MatthiasWess-IEEEAccess.pdf},
issn = {2169-3536}
}
@inproceedings{colucci:2021a,
author = {Alessio Colucci and D\'avid Juh\'asz and Martin Mosbeck
and Alberto Marchisio and Semeen Rehman and Manfred
Kreutzer and G\"{u}nter Nadbath and Axel Jantsch and
Muhammad Shafique },
title = { {MLComp}: A Methodology for Machine Learning-based
Performance Estimation and Adaptive Selection of
{Pareto}-Optimal Compiler Optimization Sequences },
key = {eml},
booktitle = {Proceedings of the Design, Automation and Test in Europe Conference and Exhibition },
year = 2021,
month = {March},
url = {http://jantsch.se/AxelJantsch/papers/2021/DavidJuhasz-DATE.pdf}
}