Lizhi Liao; Simon Eismann; Heng Li; Cor-Paul Bezemer; Diego Costa; André van Hoorn; Weiyi Shang
Early Detection of Performance Regressions by Bridging Local Performance Data and Architectural Models Inproceedings
International Conference on Software Engineering (ICSE), 2025.
BibTeX | Tags: Performance, Performance analysis, Performance engineering, Performance evaluation, Performance regressions, Performance testing
@inproceedings{Liao_ICSE2025,
title = {Early Detection of Performance Regressions by Bridging Local Performance Data and Architectural Models},
author = {Lizhi Liao and Simon Eismann and Heng Li and Cor-Paul Bezemer and Diego Costa and André van Hoorn and Weiyi Shang},
year = {2025},
date = {2025-08-15},
urldate = {2025-08-15},
booktitle = {International Conference on Software Engineering (ICSE)},
keywords = {Performance, Performance analysis, Performance engineering, Performance evaluation, Performance regressions, Performance testing},
pubstate = {published},
tppubtype = {inproceedings}
}
Simon Eismann; Diego Costa; Lizhi Liao; Cor-Paul Bezemer; Weiyi Shang; André van Hoorn; Samuel Kounev
A Case Study on the Stability of Performance Tests for Serverless Applications Journal Article
Journal of Systems and Software, 2022.
Abstract | BibTeX | Tags: Performance engineering, Performance regressions, Performance testing, Serverless
@article{EismannJSS2022,
title = {A Case Study on the Stability of Performance Tests for Serverless Applications},
author = {Simon Eismann and Diego Costa and Lizhi Liao and Cor-Paul Bezemer and Weiyi Shang and André van Hoorn and Samuel Kounev},
year = {2022},
date = {2022-03-17},
urldate = {2022-03-17},
journal = {Journal of Systems and Software},
abstract = {Context. While in serverless computing, application resource management and operational concerns are generally delegated to the cloud provider, ensuring that serverless applications meet their performance requirements is still a responsibility of the developers. Performance testing is a commonly used performance assessment practice; however, it traditionally requires visibility of the resource environment.
Objective. In this study, we investigate whether performance tests of serverless applications are stable, that is, if their results are reproducible, and what implications the serverless paradigm has for performance tests.
Method. We conduct a case study where we collect two datasets of performance test results: (a) repetitions of performance tests for varying memory size and load intensities and (b) three repetitions of the same performance test every day for ten months.
Results. We find that performance tests of serverless applications are comparatively stable if conducted on the same day. However, we also observe short-term performance variations and frequent long-term performance changes.
Conclusion. Performance tests for serverless applications can be stable; however, the serverless model impacts the planning, execution, and analysis of performance tests.},
keywords = {Performance engineering, Performance regressions, Performance testing, Serverless},
pubstate = {published},
tppubtype = {article}
}
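The stability question in the abstract above boils down to comparing repeated measurements of the same performance test. As a rough illustration only (this is not the paper's actual analysis pipeline), the hypothetical snippet below computes the coefficient of variation across same-day repetitions and flags days whose repetitions disagree beyond a threshold; all names, data, and the threshold are invented for this sketch.

```python
# Hypothetical illustration of a stability check for repeated performance tests.
# NOT the analysis from the paper; names, thresholds, and data are invented.
from statistics import mean, stdev

def coefficient_of_variation(samples):
    """Relative dispersion of a set of response-time measurements."""
    m = mean(samples)
    return stdev(samples) / m if m else float("inf")

# Three same-day repetitions of the same performance test (mean response times in ms).
daily_repetitions = {
    "2020-01-01": [102.0, 99.5, 101.2],
    "2020-01-02": [100.8, 131.4, 98.9],   # one noisy repetition
}

CV_THRESHOLD = 0.05  # flag days where repetitions differ by more than ~5%

for day, reps in daily_repetitions.items():
    cv = coefficient_of_variation(reps)
    status = "stable" if cv <= CV_THRESHOLD else "unstable"
    print(f"{day}: CV={cv:.3f} -> {status}")
```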
Cor-Paul Bezemer; Elric Milon; Andy Zaidman; Johan Pouwelse
Detecting and Analyzing I/O Performance Regressions Journal Article
Journal of Software: Evolution and Process (JSEP), 26 (12), pp. 1193–1212, 2014.
Abstract | BibTeX | Tags: Performance analysis, Performance optimization, Performance regressions
@article{Bezemer14jsep,
title = {Detecting and Analyzing I/O Performance Regressions},
author = {Cor-Paul Bezemer and Elric Milon and Andy Zaidman and Johan Pouwelse},
year = {2014},
date = {2014-07-17},
urldate = {2014-07-17},
journal = {Journal of Software: Evolution and Process (JSEP)},
volume = {26},
number = {12},
pages = {1193--1212},
publisher = {John Wiley \& Sons, Ltd},
abstract = {Regression testing can be done by re-executing a test suite on different software versions and comparing the outcome. For functional testing, the outcome of such tests is either pass (correct behaviour) or fail (incorrect behaviour). For non-functional testing, such as performance testing, this is more challenging as correct and incorrect are not clearly defined concepts for these types of testing.
In this paper, we present an approach for detecting and analyzing I/O performance regressions. Our method is supplemental to existing profilers and its goal is to analyze the effect of source code changes on the performance of a system. In this paper, we focus on analyzing the amount of I/O writes being done. The open source implementation of our approach, SPECTRAPERF, is available for download.
We evaluate our approach in a field user study on Tribler, an open source peer-to-peer client and its decentralized solution for synchronizing messages, Dispersy. In this evaluation, we show that our approach can guide the performance optimization process, as it helps developers to find performance bottlenecks on the one hand, and on the other allows them to validate the effect of performance optimizations. In addition, we perform a feasibility study on Django, the most popular Python project on Github, to demonstrate our applicability on other projects. Copyright (c) 2013 John Wiley & Sons, Ltd.},
keywords = {Performance analysis, Performance optimization, Performance regressions},
pubstate = {published},
tppubtype = {article}
}
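The abstract above describes comparing the amount of I/O writes across software versions to expose regressions. The sketch below is a simplified, hypothetical take on that idea (it is not the SPECTRAPERF implementation): given per-function write counts from two test runs, it flags functions whose writes grew beyond a tolerance. All function names, numbers, and the tolerance are illustrative.

```python
# Hypothetical sketch of an I/O write regression check between two software versions.
# Not SPECTRAPERF; data, names, and the tolerance are invented for illustration.

def find_write_regressions(baseline, candidate, tolerance=0.10):
    """Return functions whose I/O writes grew by more than `tolerance` (fraction)."""
    regressions = {}
    for func, new_bytes in candidate.items():
        old_bytes = baseline.get(func, 0)
        if old_bytes == 0:
            if new_bytes > 0:
                regressions[func] = (old_bytes, new_bytes)  # new I/O where there was none
        elif (new_bytes - old_bytes) / old_bytes > tolerance:
            regressions[func] = (old_bytes, new_bytes)
    return regressions

# Bytes written per function, as collected by a profiler for two versions of the system.
version_a = {"sync_messages": 4_096, "store_blob": 65_536}
version_b = {"sync_messages": 5_120, "store_blob": 65_536, "write_cache": 2_048}

for func, (old, new) in find_write_regressions(version_a, version_b).items():
    print(f"{func}: {old} -> {new} bytes written")
```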