We provide a comprehensive, effective, and efficient methodology for the design and experimental analysis of algorithms, relying on modern statistical techniques for tuning and understanding algorithms from an experimental perspective. To this end, we use the sequential parameter optimization (SPO) method, which has been successfully applied as a tuning procedure to numerous heuristics for practical and theoretical optimization problems. Two case studies illustrate the applicability of SPO to algorithm tuning and model selection.
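The abstract describes SPO only at a high level; the sketch below illustrates the general sequential model-based tuning loop that SPO-style procedures follow (evaluate an initial design of parameter settings, fit a surrogate model, then repeatedly add the surrogate's most promising setting). It is not the author's SPO implementation: the Gaussian-process surrogate, the lower-confidence-bound selection criterion, the simple (1+1) hill climber used as the tuned heuristic, and all settings are illustrative assumptions.

# Minimal sketch of sequential model-based parameter tuning (assumed setup,
# not the SPO reference implementation): tune the step size of a toy
# (1+1) hill climber on the sphere function.
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern

rng = np.random.default_rng(1)

def run_heuristic(step_size, n_steps=200, dim=5, repeats=5):
    """Mean final fitness of a (1+1) hill climber using the given step size."""
    results = []
    for _ in range(repeats):
        x = rng.normal(size=dim)
        f = np.sum(x**2)
        for _ in range(n_steps):
            cand = x + step_size * rng.normal(size=dim)
            fc = np.sum(cand**2)
            if fc < f:
                x, f = cand, fc
        results.append(f)
    return float(np.mean(results))

low, high = 1e-3, 1.0          # search interval for the tuned parameter
n_init, n_seq = 8, 12          # initial design size, sequential steps

# Initial design: a simple 1-D Latin hypercube (one point per stratum).
strata = (np.arange(n_init) + rng.random(n_init)) / n_init
X = (low + strata * (high - low)).reshape(-1, 1)
y = np.array([run_heuristic(s[0]) for s in X])

gp = GaussianProcessRegressor(kernel=Matern(nu=2.5), normalize_y=True)
for _ in range(n_seq):
    gp.fit(np.log(X), y)                        # surrogate on log-scaled parameter
    cand = np.linspace(low, high, 200).reshape(-1, 1)
    mu, sd = gp.predict(np.log(cand), return_std=True)
    nxt = cand[np.argmin(mu - sd)]              # lower-confidence-bound criterion
    X = np.vstack([X, nxt.reshape(1, -1)])
    y = np.append(y, run_heuristic(nxt[0]))

best = X[np.argmin(y), 0]
print(f"best step size found: {best:.4f} (mean fitness {y.min():.4g})")

Repeated evaluations per setting are included because the tuned heuristic is stochastic; averaging reduces the noise the surrogate has to model, which is one of the practical concerns SPO addresses.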
@InProceedings{bartzbeielstein:DagSemProc.09181.5,
  author    = {Bartz-Beielstein, Thomas},
  title     = {{Sequential Parameter Optimization}},
  booktitle = {Sampling-based Optimization in the Presence of Uncertainty},
  pages     = {1--32},
  series    = {Dagstuhl Seminar Proceedings (DagSemProc)},
  ISSN      = {1862-4405},
  year      = {2009},
  volume    = {9181},
  editor    = {J\"{u}rgen Branke and Barry L. Nelson and Warren Buckler Powell and Thomas J. Santner},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/DagSemProc.09181.5},
  URN       = {urn:nbn:de:0030-drops-21159},
  doi       = {10.4230/DagSemProc.09181.5},
  annote    = {Keywords: Optimization, evolutionary algorithms, design of experiments}
}