The evaluation of computer intrusion detection systems (which we refer to as intrusion detection systems) is an active research area. In this paper, we survey and systematize common practices in the area of evaluation of intrusion detection systems. For this purpose, we define a design space structured into three parts: workload, metrics, and measurement methodology. We then provide an overview of the common practices in evaluation of intrusion detection systems by surveying evaluation approaches and methods related to each part of the design space. Finally, we discuss open issues and challenges focusing on evaluation methodologies for novel intrusion detection systems.
%0 Journal Article
%1 MiViKoAvPa2015-CSUR-IDSEval
%A Milenkoski, Aleksandar
%A Vieira, Marco
%A Kounev, Samuel
%A Avritzer, Alberto
%A Payne, Bryan D.
%C New York, NY, USA
%D 2015
%I ACM
%J ACM Computing Surveys
%K Metrics_and_benchmarking_methodologies Reliability Security Survey Virtualization descartes t_journalmagazine
%N 1
%P 12:1--12:41
%T Evaluating Computer Intrusion Detection Systems: A Survey of Common Practices
%U http://dl.acm.org/authorize?N06203
%V 48
%X The evaluation of computer intrusion detection systems (which we refer to as intrusion detection systems) is an active research area. In this paper, we survey and systematize common practices in the area of evaluation of intrusion detection systems. For this purpose, we define a design space structured into three parts: workload, metrics, and measurement methodology. We then provide an overview of the common practices in evaluation of intrusion detection systems by surveying evaluation approaches and methods related to each part of the design space. Finally, we discuss open issues and challenges focusing on evaluation methodologies for novel intrusion detection systems.
@article{MiViKoAvPa2015-CSUR-IDSEval,
  abstract  = {The evaluation of computer intrusion detection systems (which we refer to as intrusion detection systems) is an active research area. In this paper, we survey and systematize common practices in the area of evaluation of intrusion detection systems. For this purpose, we define a design space structured into three parts: workload, metrics, and measurement methodology. We then provide an overview of the common practices in evaluation of intrusion detection systems by surveying evaluation approaches and methods related to each part of the design space. Finally, we discuss open issues and challenges focusing on evaluation methodologies for novel intrusion detection systems.},
  added-at  = {2020-04-06T11:23:24.000+0200},
  address   = {New York, NY, USA},
  author    = {Milenkoski, Aleksandar and Vieira, Marco and Kounev, Samuel and Avritzer, Alberto and Payne, Bryan D.},
  biburl    = {https://www.bibsonomy.org/bibtex/25a775d02839785d2aeb3cc4b833b4686/se-group},
  interhash = {4db8da564023e37b6002d8f5abaf88c0},
  intrahash = {5a775d02839785d2aeb3cc4b833b4686},
  journal   = {{ACM} Computing Surveys},
  keywords  = {Metrics_and_benchmarking_methodologies Reliability Security Survey Virtualization descartes t_journalmagazine},
  month     = sep,
  note      = {5-year Impact Factor (2014): 5.949},
  number    = {1},
  pages     = {12:1--12:41},
  publisher = {ACM},
  timestamp = {2021-02-08T15:17:37.000+0100},
  title     = {Evaluating Computer Intrusion Detection Systems: A Survey of Common Practices},
  url       = {http://dl.acm.org/authorize?N06203},
  volume    = {48},
  year      = {2015}
}