This paper shows that existing software metric tools interpret and implement the definitions of object-oriented software metrics differently. This delivers tool-dependent metrics results and has even implications on the results of analyses based on these metrics results. In short, the metrics-based assessment of a software system and measures taken to improve its design differ considerably from tool to tool. To support our case, we conducted an experiment with a number of commercial and free metrics tools. We calculated metrics values using the same set of standard metrics for three software systems of different sizes. Measurements show that, for the same software system and metrics, the metrics values are tool depended. We also defined a (simple) software quality model for "maintainability" based on the metrics selected. It defines a ranking of the classes that are most critical wrt. maintainability. Measurements show that even the ranking of classes in a software system is metrics tool dependent.
%0 Conference Paper
%1 Lincke:2008:CSM:1390630.1390648
%A Lincke, Rüdiger
%A Lundberg, Jonas
%A Löwe, Welf
%B Proceedings of the 2008 international symposium on Software testing and analysis
%C New York, NY, USA
%D 2008
%I ACM
%K metrics software software.metrics
%P 131--142
%R 10.1145/1390630.1390648
%T Comparing software metrics tools
%U http://doi.acm.org/10.1145/1390630.1390648
%X This paper shows that existing software metric tools interpret and implement the definitions of object-oriented software metrics differently. This delivers tool-dependent metrics results and has even implications on the results of analyses based on these metrics results. In short, the metrics-based assessment of a software system and measures taken to improve its design differ considerably from tool to tool. To support our case, we conducted an experiment with a number of commercial and free metrics tools. We calculated metrics values using the same set of standard metrics for three software systems of different sizes. Measurements show that, for the same software system and metrics, the metrics values are tool depended. We also defined a (simple) software quality model for "maintainability" based on the metrics selected. It defines a ranking of the classes that are most critical wrt. maintainability. Measurements show that even the ranking of classes in a software system is metrics tool dependent.
%@ 978-1-60558-050-0
@inproceedings{Lincke:2008:CSM:1390630.1390648,
  author      = {Lincke, R{\"u}diger and Lundberg, Jonas and L{\"o}we, Welf},
  title       = {Comparing Software Metrics Tools},
  booktitle   = {Proceedings of the 2008 International Symposium on Software Testing and Analysis},
  series      = {ISSTA '08},
  year        = {2008},
  pages       = {131--142},
  numpages    = {12},
  location    = {Seattle, WA, USA},
  publisher   = {ACM},
  address     = {New York, NY, USA},
  isbn        = {978-1-60558-050-0},
  doi         = {10.1145/1390630.1390648},
  url         = {https://doi.org/10.1145/1390630.1390648},
  acmid       = {1390648},
  keywords    = {metrics software software.metrics},
  abstract    = {This paper shows that existing software metric tools interpret and implement the definitions of object-oriented software metrics differently. This delivers tool-dependent metrics results and has even implications on the results of analyses based on these metrics results. In short, the metrics-based assessment of a software system and measures taken to improve its design differ considerably from tool to tool. To support our case, we conducted an experiment with a number of commercial and free metrics tools. We calculated metrics values using the same set of standard metrics for three software systems of different sizes. Measurements show that, for the same software system and metrics, the metrics values are tool depended. We also defined a (simple) software quality model for "maintainability" based on the metrics selected. It defines a ranking of the classes that are most critical wrt. maintainability. Measurements show that even the ranking of classes in a software system is metrics tool dependent.},
  added-at    = {2012-08-29T09:12:08.000+0200},
  biburl      = {https://www.bibsonomy.org/bibtex/21a92f8523cd4cf9f0370ddd3c9db7f87/ji},
  description = {Comparing software metrics tools},
  interhash   = {3d4b19054cfe45a9605054fd49c57f79},
  intrahash   = {1a92f8523cd4cf9f0370ddd3c9db7f87},
  timestamp   = {2012-08-29T09:12:08.000+0200},
}