This work deals with an NP-hard problem in graphs known as the weighted independent domination problem. We propose a biased random key genetic algorithm for solving this problem. The most important part of the proposed algorithm is a decoder that translates any vector of real values into a valid solution to the tackled problem. The experimental results, in comparison to a state-of-the-art population-based iterated greedy algorithm from the literature, show that our proposed approach has advantages over the latter in the context of denser graphs in which edges have higher weights than vertices.

},
  author = {R{\'o}driguez Corominas, Guillem and Blum, Christian and Blesa, Maria J}
}

@proceedings{56274,
  title     = {{HM} 2019 -- Proceedings of the 11th International Workshop on Hybrid Metaheuristics},
  series    = {Lecture Notes in Computer Science},
  volume    = {11299},
  year      = {2019},
  publisher = {Springer},
  doi       = {10.1007/978-3-030-05983-5},
  editor    = {Blesa, Maria J and Blum, Christian and Gambini Santos, Haroldo and Pinacho Davidson, Pedro and Godoy del Campo, Julio}
}

@article{55971,
  title    = {A Comprehensive Comparison of Metaheuristics for the Repetition-Free Longest Common Subsequence Problem},
  journal  = {Journal of Heuristics},
  volume   = {24},
  year     = {2018},
  pages    = {551--579},
  doi      = {10.1007/s10732-017-9329-x},
  author   = {Blum, Christian and Blesa, Maria J},
  abstract = {This paper deals with an NP-hard string problem from the bio-informatics field: the repetition-free longest common subsequence problem. This problem has enjoyed an increasing interest in recent years, which has resulted in the application of several pure as well as hybrid metaheuristics. However, the literature lacks a comprehensive comparison between those approaches. Moreover, it has been shown that general purpose integer linear programming solvers are very efficient for solving many of the problem instances that were used so far in the literature. Therefore, in this work we extend the available benchmark set, adding larger instances to which integer linear programming solvers cannot be applied anymore. Moreover, we provide a comprehensive comparison of the approaches found in the literature. Based on the results we propose a hybrid between two of the best methods which turns out to inherit the complementary strengths of both methods.}
}

@article{56027,
  title    = {Hybrid Techniques Based on Solving Reduced Problem Instances for a Longest Common Subsequence Problem},
  journal  = {Applied Soft Computing},
  volume   = {62},
  year     = {2018},
  pages    = {15--28},
  doi      = {10.1016/j.asoc.2017.10.005},
  author   = {Blum, Christian and Blesa, Maria J},
  abstract = {Finding the longest common subsequence of a given set of input strings is a relevant problem arising in various practical settings. One of these problems is the so-called longest arc-preserving common subsequence problem. This NP-hard combinatorial optimization problem was introduced for the comparison of arc-annotated Ribonucleic acid (RNA) sequences. In this work we present an integer linear programming (ILP) formulation of the problem. As even in the context of rather small problem instances the application of a general purpose ILP solver is not viable due to the size of the model, we study alternative ways based on model reduction in order to take profit from this ILP model. First, we present a heuristic way for reducing the model, with the subsequent application of an ILP solver. Second, we propose the application of an iterative hybrid algorithm that makes use of an ILP solver for generating high quality solutions at each iteration. Experimental results concerning artificial and real problem instances show that the proposed techniques outperform an available technique from the literature.}
}

@inproceedings{56019,
  title     = {An Alternative {ILP} Model and Algorithmic Ideas for the Maximum Edge-Disjoint Paths Problem},
  booktitle = {Metaheuristics International Conference ({MIC})},
  year      = {2017},
  pages     = {938--940},
  author    = {Blum, Christian and Blesa, Maria J and Duarte, Abraham and S{\'a}nchez-Oro, Jes{\'u}s},
  abstract  = {This document describes an alternative integer linear programming (ILP) model for the so-called edge-disjoint paths (EDP) problem in undirected graphs. EDP is an NP-hard problem where exact methods are not able to produce high quality solutions. Therefore, we propose two different algorithms for combining exact and heuristic methods. On the one hand, we consider a restricted model that limits the number of paths between two given nodes in the graphs (which reduces the search space exploration). On the other hand, the application of a mat-heuristic algorithm known as Construct, Merge, Solve and Adapt (CMSA) is considered. In this document we show some preliminary results concerning the restricted model. These results indicate the potential usefulness of the presented ideas.}
}

@inproceedings{56018,
  title     = {On the comparison of {CMSA} versus {LNS} for solving Combinatorial Optimization problems with different solution sizes},
  booktitle = {Metaheuristics International Conference ({MIC})},
  year      = {2017},
  pages     = {676--678},
  author    = {Liz{\'a}rraga, Evelia and Blesa, Maria J and Blum, Christian},
  abstract  = {Both, Construct, Merge, Solve and Adapt (CMSA) and Large Neighborhood Search (LNS), are hybrid algorithms that are based on iteratively solving sub-instances of the original problem instances, if possible, to optimality. This is done by reducing the search space of the tackled problem instance in algorithm-specific ways which differ from one technique to the other. In this paper we provide first experimental evidence for the intuition that, conditioned by the way in which the search space is reduced, LNS should generally work better than CMSA in the context of problems in which solutions are rather large, and the opposite is the case for problems in which solutions are rather small. The size of a solution is hereby measured by the number of components of which the solution is composed, in comparison to the total number of solution components. In this ongoing work we are conducting experiments in the context of the multi-dimensional knapsack problem, the minimum-weight dominating set, and the single-source capacitated facility location problem.}
}

@inproceedings{55973,
  title     = {Construct, Merge, Solve and Adapt Versus Large Neighborhood Search for Solving the Multi-dimensional Knapsack Problem: Which One Works Better When?},
  booktitle = {{EvoCOP} 2017 -- 17th European Conference on Evolutionary Computation in Combinatorial Optimization},
  series    = {Lecture Notes in Computer Science},
  volume    = {10197},
  year      = {2017},
  pages     = {60--74},
  publisher = {Springer Verlag},
  address   = {Amsterdam},
  doi       = {10.1007/978-3-319-55453-2_5},
  author    = {Liz{\'a}rraga, Evelia and Blesa, Maria J and Blum, Christian},
  abstract  = {Both, Construct, Merge, Solve and Adapt (CMSA) and Large Neighborhood Search (LNS), are hybrid algorithms that are based on iteratively solving sub-instances of the original problem instances, if possible, to optimality. This is done by reducing the search space of the tackled problem instance in algorithm-specific ways which differ from one technique to the other. In this paper we provide first experimental evidence for the intuition that, conditioned by the way in which the search space is reduced, LNS should generally work better than CMSA in the context of problems in which solutions are rather large, and the opposite is the case for problems in which solutions are rather small. The size of a solution is hereby measured by the number of components of which the solution is composed, in comparison to the total number of solution components. Experiments are conducted in the context of the multi-dimensional knapsack problem.}
}

@inproceedings{55981,
  title         = {A Hybrid Evolutionary Algorithm Based on Solution Merging for the Longest Arc-Preserving Common Subsequence Problem},
  booktitle     = {{CEC} 2017 -- Congress on Evolutionary Computation},
  year          = {2017},
  publisher     = {IEEE Press},
  issn          = {1432-7643},
  doi           = {10.1007/s00500-016-2379-4},
  author        = {Liz{\'a}rraga, Evelia and Blesa, Maria J and Blum, Christian and Raidl, G{\"u}nther R},
  internal-note = {NOTE(review): this entry appears to fuse two different papers. The second abstract paragraph, the issn, the doi (10.1007/s00500-016-2379-4, a Soft Computing article) and the author list seem to belong to another entry whose header was lost during export --- verify against the original database and split into two entries.},
  abstract      = {The longest arc-preserving common subsequence problem is an NP-hard combinatorial optimization problem from the field of computational biology. This problem finds applications, in particular, in the comparison of arc-annotated Ribonucleic acid (RNA) sequences. In this work we propose a simple, hybrid evolutionary algorithm to tackle this problem. The most important feature of this algorithm concerns a crossover operator based on solution merging. In solution merging, two or more solutions to the problem are merged, and an exact technique is used to find the best solution within this union. It is experimentally shown that the proposed algorithm outperforms a heuristic from the literature.

In this work, we consider the following NP-hard combinatorial optimization problem from computational biology. Given a set of input strings of equal length, the goal is to identify a maximum cardinality subset of strings that differ maximally in a pre-defined number of positions. First of all, we introduce an integer linear programming model for this problem. Second, two variants of a rather simple greedy strategy are proposed. Finally, a large neighborhood search algorithm is presented. A comprehensive experimental comparison among the proposed techniques shows, first, that larger neighborhood search generally outperforms both greedy strategies. Second, while large neighborhood search shows to be competitive with the stand-alone application of CPLEX for small- and medium-sized problem instances, it outperforms CPLEX in the context of larger instances.}
}