(Legend: icon 1 = Paper PDF, icon 2 = Presentation slides, icon 3 = Presentation video)
1.
Cor-Paul Bezemer; Elric Milon; Andy Zaidman; Johan Pouwelse
Detecting and Analyzing I/O Performance Regressions (Journal Article)
Journal of Software: Evolution and Process (JSEP), 26 (12), pp. 1193–1212, 2014.
Abstract | BibTeX | Tags: Performance analysis, Performance optimization, Performance regressions
@article{Bezemer14jsep,
  title     = {Detecting and Analyzing {I/O} Performance Regressions},
  author    = {Cor-Paul Bezemer and Elric Milon and Andy Zaidman and Johan Pouwelse},
  year      = {2014},
  date      = {2014-07-17},
  urldate   = {2014-07-17},
  journal   = {Journal of Software: Evolution and Process (JSEP)},
  volume    = {26},
  number    = {12},
  pages     = {1193--1212},
  publisher = {John Wiley \& Sons, Ltd},
  abstract  = {Regression testing can be done by re-executing a test suite on different software versions and comparing the outcome. For functional testing, the outcome of such tests is either pass (correct behaviour) or fail (incorrect behaviour). For non-functional testing, such as performance testing, this is more challenging as correct and incorrect are not clearly defined concepts for these types of testing.
In this paper, we present an approach for detecting and analyzing I/O performance regressions. Our method is supplemental to existing profilers and its goal is to analyze the effect of source code changes on the performance of a system. In this paper, we focus on analyzing the amount of I/O writes being done. The open source implementation of our approach, SPECTRAPERF, is available for download.
We evaluate our approach in a field user study on Tribler, an open source peer-to-peer client and its decentralized solution for synchronizing messages, Dispersy. In this evaluation, we show that our approach can guide the performance optimization process, as it helps developers to find performance bottlenecks on the one hand, and on the other allows them to validate the effect of performance optimizations. In addition, we perform a feasibility study on Django, the most popular Python project on Github, to demonstrate our applicability on other projects. Copyright (c) 2013 John Wiley \& Sons, Ltd.},
  keywords  = {Performance analysis, Performance optimization, Performance regressions},
  pubstate  = {published},
  tppubtype = {article}
}
Regression testing can be done by re-executing a test suite on different software versions and comparing the outcome. For functional testing, the outcome of such tests is either pass (correct behaviour) or fail (incorrect behaviour). For non-functional testing, such as performance testing, this is more challenging as correct and incorrect are not clearly defined concepts for these types of testing.
In this paper, we present an approach for detecting and analyzing I/O performance regressions. Our method is supplemental to existing profilers and its goal is to analyze the effect of source code changes on the performance of a system. In this paper, we focus on analyzing the amount of I/O writes being done. The open source implementation of our approach, SPECTRAPERF, is available for download.
We evaluate our approach in a field user study on Tribler, an open source peer-to-peer client and its decentralized solution for synchronizing messages, Dispersy. In this evaluation, we show that our approach can guide the performance optimization process, as it helps developers to find performance bottlenecks on the one hand, and on the other allows them to validate the effect of performance optimizations. In addition, we perform a feasibility study on Django, the most popular Python project on GitHub, to demonstrate our applicability on other projects. Copyright (c) 2013 John Wiley & Sons, Ltd.
In this paper, we present an approach for detecting and analyzing I/O performance regressions. Our method is supplemental to existing profilers and its goal is to analyze the effect of source code changes on the performance of a system. In this paper, we focus on analyzing the amount of I/O writes being done. The open source implementation of our approach, SPECTRAPERF, is available for download.
We evaluate our approach in a field user study on Tribler, an open source peer-to-peer client and its decentralized solution for synchronizing messages, Dispersy. In this evaluation, we show that our approach can guide the performance optimization process, as it helps developers to find performance bottlenecks on the one hand, and on the other allows them to validate the effect of performance optimizations. In addition, we perform a feasibility study on Django, the most popular Python project on GitHub, to demonstrate our applicability on other projects. Copyright (c) 2013 John Wiley & Sons, Ltd.