rvb.bib

@conference{BBNN17,
  author = {Matthias Bentert and René van Bevern and André
                  Nichterlein and Rolf Niedermeier},
  title = {Parameterized algorithms for power-efficient
                  connected symmetric wireless sensor networks},
  booktitle = {Proceedings of the 13th International Symposium on
                  Algorithms and Experiments for Wireless Networks
                  (ALGOSENSORS'17), Vienna, Austria},
  year = 2017,
  series = {Lecture Notes in Computer Science},
  note = {To appear},
  publisher = {Springer},
  date = {2017-09-04},
  url = {https://arxiv.org/abs/1706.03177},
  abstract = {We study an NP-hard problem motivated by
                  energy-efficiently maintaining the connectivity of a
                  symmetric wireless sensor communication
                  network. Given an edge-weighted n-vertex graph, find
                  a connected spanning subgraph of minimum cost, where
                  the cost is determined by letting each vertex pay
                  the most expensive edge incident to it in the
                  subgraph. We provide an algorithm that works in
                  polynomial time if one can find a set of obligatory
                  edges that yield a spanning subgraph with O(log n)
                  connected components. We also provide a linear-time
                  algorithm that reduces any input graph that consists
                  of a tree together with g additional edges to an
                  equivalent graph with O(g) vertices. Based on this,
                  we obtain a polynomial-time algorithm for
                  g ∈ O(log n). On the negative side, we show that
                  o(log n)-approximating the difference d between the
                  optimal solution cost and a natural lower bound is
                  NP-hard and that there are presumably no exact
                  algorithms running in 2^o(n) time or in f(d)·n^O(1)
                  time for any computable function f.}
}
@unpublished{MBxx,
  author = {Matthias Mnich and Ren{\'{e}} van Bevern},
  title = {Parameterized complexity of machine scheduling: 15
                  open problems},
  year = 2017,
  date = {2017-09-06},
  url = {http://arxiv.org/abs/1709.01670},
  abstract = {Machine scheduling problems are a long-time key
                  domain of algorithms and complexity research. A
                  novel approach to machine scheduling problems is
                  fixed-parameter algorithms. To stimulate this
                  thriving research direction, we propose 15
                  interesting open questions in this area.}
}
@unpublished{BFM+xx,
  title = {The parameterized complexity of finding secluded
                  solutions to some classical optimization problems on
                  graphs},
  author = {René van Bevern and Till Fluschnik and George
                  B. Mertzios and Hendrik Molter and Manuel Sorge and
                  Ondřej Suchý},
  date = {2017-06-01},
  year = 2017,
  url = {https://arxiv.org/abs/1606.09000v4},
  abstract = {This work studies the parameterized complexity of
                  finding secluded solutions to classical
                  combinatorial optimization problems on graphs such
                  as finding minimum s-t separators, feedback vertex
                  sets, dominating sets, maximum independent sets, and
                  vertex deletion problems for hereditary graph
                  properties: Herein, one seeks not only to
                  minimize or maximize the size of the solution, but
                  also to minimize the size of its neighborhood. This
                  restriction has applications in secure routing and
                  community detection.}
}
@article{BKS17,
  title = {A parameterized approximation algorithm for the
                  mixed and windy Capacitated Arc Routing Problem:
                  theory and experiments},
  author = {René van Bevern and Christian Komusiewicz and Manuel
                  Sorge},
  journal = {Networks},
  volume = {70},
  number = {3},
  issn = {1097-0037},
  doi = {10.1002/net.21742},
  pages = {262--278},
  keywords = {vehicle routing, transportation, Rural Postman,
                  Chinese Postman, NP-hard problem, fixed-parameter
                  algorithm, combinatorial optimization},
  year = {2017},
  note = {WARP 2 special issue},
  url = {https://arxiv.org/abs/1506.05620v2},
  slides = {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},
  sourcecode = {https://gitlab.com/rvb/mwcarp-approx},
  date = {2017-10-01},
  abstract = {We prove that any polynomial-time α(n)-approximation
                  algorithm for the n-vertex metric asymmetric
                  Traveling Salesperson Problem yields a
                  polynomial-time O(α(C))-approximation algorithm for
                  the mixed and windy Capacitated Arc Routing Problem,
                  where C is the number of weakly connected components
                  in the subgraph induced by the positive-demand
                  arcs---a small number in many applications.  In
                  conjunction with known results, we obtain
                  constant-factor approximations for C∈O(log n) and
                  O(log C/log log C)-approximations in general.
                  Experiments show that our algorithm, together with
                  several heuristic enhancements, outperforms many
                  previous polynomial-time heuristics.  Finally, since
                  the solution quality achievable in polynomial time
                  appears to mainly depend on C and since C=1 in
                  almost all benchmark instances, we propose the Ob
                  benchmark set, simulating cities that are divided
                  into several components by a river.}
}
@conference{BFM+17,
  author = {René van Bevern and Till Fluschnik and George
                  B. Mertzios and Hendrik Molter and Manuel Sorge and
                  Ondřej Suchý},
  title = {Finding Secluded Places of Special Interest in
                  Graphs},
  date = {2017-02-10},
  booktitle = {Proceedings of the 11th International Symposium on
                  Parameterized and Exact Computation (IPEC'16),
                  Aarhus, Denmark},
  abstract = {Finding a vertex subset in a graph that satisfies a
                  certain property is one of the most-studied topics
                  in algorithmic graph theory. The focus herein is
                  often on minimizing or maximizing the size of the
                  solution, that is, the size of the desired vertex
                  set. In several applications, however, we also want
                  to limit the "exposure" of the solution to the rest
                  of the graph. This is the case, for example, when
                  the solution represents persons that ought to deal
                  with sensitive information or a segregated
                  community. In this work, we thus explore the
                  (parameterized) complexity of finding such secluded
                  vertex subsets for a wide variety of properties that
                  they shall fulfill. More precisely, we study the
                  constraint that the (open or closed) neighborhood of
                  the solution shall be bounded by a parameter and the
                  influence of this constraint on the complexity of
                  minimizing separators, feedback vertex sets, F-free
                  vertex deletion sets, dominating sets, and the
                  maximization of independent sets.},
  pages = {5:1--5:16},
  series = {Leibniz International Proceedings in Informatics
                  (LIPIcs)},
  isbn = {978-3-95977-023-1},
  issn = {1868-8969},
  year = {2017},
  volume = {63},
  publisher = {Schloss Dagstuhl--Leibniz-Zentrum für Informatik},
  url = {http://drops.dagstuhl.de/opus/volltexte/2017/6938},
  doi = {10.4230/LIPIcs.IPEC.2016.5}
}
@article{BFKxx,
  title = {Parameterizing edge modification problems above lower
                  bounds},
  author = {René van Bevern and Vincent Froese and Christian
                  Komusiewicz},
  url = {http://arxiv.org/abs/1512.04047v3},
  doi = {10.1007/s00224-016-9746-5},
  slides = {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},
  year = 2017,
  date = {2017-01-18},
  journal = {Theory of Computing Systems},
  abstract = {We study the parameterized complexity of a variant of
                  the F-free Editing problem: Given a graph G and a
                  natural number k, is it possible to modify at most k
                  edges in G so that the resulting graph contains no
                  induced subgraph isomorphic to F?  In our variant, the
                  input additionally contains a vertex-disjoint packing
                  H of induced subgraphs of G, which provides a lower
                  bound h(H) on the number of edge modifications
                  required to transform G into an F-free graph.  While
                  earlier works used the number k as parameter or
                  structural parameters of the input graph G, we
                  consider instead the parameter l:=k-h(H), that is, the
                  number of edge modifications above the lower bound
                  h(H).  We develop a framework of generic data
                  reduction rules to show fixed-parameter tractability
                  with respect to l for K_3-Free Editing, Feedback Arc
                  Set in Tournaments, and Cluster Editing when the
                  packing H contains subgraphs with bounded solution
                  size.  For K_3-Free Editing, we also prove NP-hardness
                  in case of edge-disjoint packings of K_3s and l=0,
                  while for K_q-Free Editing and q ≥ 6, NP-hardness for
                  l=0 even holds for vertex-disjoint packings of K_qs.
                  In addition, we provide NP-hardness results for F-free
                  Vertex Deletion, where the aim is to delete a minimum
                  number of vertices to make the input graph F-free.},
  keywords = {algorithmic graph theory, graph modification, invited,
                  network analysis, NP-hard, parameterized complexity},
  note = {CSR'16 special issue.  In press}
}
@conference{BKK+16,
  title = {Twins in Subdivision Drawings of Hypergraphs},
  author = {René van Bevern and Iyad Kanj and Christian
                  Komusiewicz and Rolf Niedermeier and Manuel Sorge},
  year = 2016,
  date = {2016-12-08},
  booktitle = {Proceedings of the 24th International Symposium on
                  Graph Drawing and Network Visualization (GD'16),
                  Athens, Greece},
  volume = {9801},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer},
  url = {http://arxiv.org/abs/1511.09389v3},
  abstract = {Visualizing hypergraphs, systems of subsets of some
                  universe, has continuously attracted research interest
                  in the last decades. We study a natural kind of
                  hypergraph visualization called subdivision drawings.
                  Dinkla et al. [Comput. Graph. Forum ’12] claimed that
                  only few hypergraphs have a subdivision
                  drawing. However, this statement seems to be based on
                  the assumption (also used in previous work) that the
                  input hypergraph does not contain twins, pairs of
                  vertices which are in precisely the same hyperedges
                  (subsets of the universe). We show that such vertices
                  may be necessary for a hypergraph to admit a
                  subdivision drawing. As a counterpart, we show that
                  the number of such “necessary twins” is upper-bounded
                  by a function of the number m of hyperedges and a
                  further parameter r of the desired drawing related to
                  its number of layers. This leads to a linear-time
                  algorithm for determining such subdivision drawings
                  if m and r are constant; in other words, the problem
                  is linear-time fixed-parameter tractable with respect
                  to the parameters m and r.},
  keywords = {algorithmic graph theory, graph modification},
  pages = {67--80},
  isbn = {978-3-319-50106-2},
  doi = {10.1007/978-3-319-50106-2_6}
}
@article{BKN+16,
  title = {H-Index Manipulation by Merging Articles: Models,
                  Theory, and Experiments},
  author = {René van Bevern and Christian Komusiewicz and Rolf
                  Niedermeier and Manuel Sorge and Toby Walsh},
  url = {http://arxiv.org/abs/1412.5498v3},
  sourcecode = {http://fpt.akt.tu-berlin.de/hindex/},
  year = 2016,
  date = {2016-11-01},
  journal = {Artificial Intelligence},
  volume = 240,
  pages = {19--35},
  abstract = {An author's profile on Google Scholar consists of
                  indexed articles and associated data, such as the
                  number of citations and the H-index. The author is
                  allowed to merge articles, which may affect the
                  H-index. We analyze the (parameterized) computational
                  complexity of maximizing the H-index using article
                  merges. Herein, to model realistic manipulation
                  scenarios, we define a compatibility graph whose edges
                  correspond to plausible merges. Moreover, we consider
                  several different measures for computing the citation
                  count of a merged article. For the measure used by
                  Google Scholar, we give an algorithm that maximizes
                  the H-index in linear time if the compatibility graph
                  has constant-size connected components. In contrast,
                  if we allow merging arbitrary articles (that is, for
                  arbitrary compatibility graphs), then already
                  increasing the H-index by one is NP-hard. Experiments
                  on Google Scholar profiles of AI researchers show that
                  the H-index can be manipulated substantially only if
                  one merges articles with highly dissimilar titles.},
  doi = {10.1016/j.artint.2016.08.001},
  keywords = {network analysis, parameterized complexity}
}
@conference{BBB+16,
  title = {Precedence-constrained scheduling problems
                  parameterized by partial order width},
  author = {René van Bevern and Robert Bredereck and Laurent
                  Bulteau and Christian Komusiewicz and Nimrod Talmon
                  and Gerhard J. Woeginger},
  url = {http://arxiv.org/abs/1605.00901v1},
  doi = {10.1007/978-3-319-44914-2_9},
  slides = {http://rvb.su/pdf/p-prec-cmax-door16.pdf},
  year = {2016},
  date = {2016-09-19},
  booktitle = {Proceedings of the 9th International Conference on
                  Discrete Optimization and Operations Research
                  (DOOR'16), Vladivostok, Russian Federation},
  publisher = {Springer},
  abstract = {Negatively answering a question posed by Mnich and
                  Wiese (Math. Program. 154(1-2):533-562), we show that
                  P2|prec,pj∈{1,2}|Cmax, the problem of finding a
                  non-preemptive minimum-makespan schedule for
                  precedence-constrained jobs of lengths 1 and 2 on two
                  parallel identical machines, is W[2]-hard
                  parameterized by the width of the partial order giving
                  the precedence constraints. To this end, we show that
                  Shuffle Product, the problem of deciding whether a
                  given word can be obtained by interleaving the letters
                  of k other given words, is W[2]-hard parameterized by
                  k, thus additionally answering a question posed by
                  Rizzi and Vialette (CSR 2013). Finally, refining a
                  geometric algorithm due to Servakh
                  (Diskretn. Anal. Issled. Oper. 7(1):75-82), we show
                  that the more general Resource-Constrained Project
                  Scheduling problem is fixed-parameter tractable
                  parameterized by the partial order width combined with
                  the maximum allowed difference between the earliest
                  possible and actual starting time of a job.},
  pages = {105--120},
  volume = {9869},
  series = {Lecture Notes in Computer Science},
  keywords = {NP-hard, parameterized complexity, scheduling}
}
@conference{BKM+16,
  title = {H-Index Manipulation by Undoing Merges},
  author = {René van Bevern and Christian Komusiewicz and Hendrik
                  Molter and Rolf Niedermeier and Manuel Sorge and Toby
                  Walsh},
  doi = {10.3233/978-1-61499-672-9-895},
  url = {http://arxiv.org/abs/1604.04827v2},
  year = {2016},
  date = {2016-08-29},
  booktitle = {Proceedings of the 22nd European Conference on
                  Artificial Intelligence (ECAI'16), The Hague, The
                  Netherlands},
  abstract = {The h-index is one of the most important bibliographic
                  measures used to assess the performance of
                  researchers. Van Bevern et al. [Artif. Intel., in
                  press] showed that, despite computational worst-case
                  hardness results, substantial manipulation of the
                  h-index of Google Scholar author profiles is possible
                  by merging articles. Complementing previous work, we
                  study the opposite operation, the splitting of
                  articles, which is arguably the more natural operation
                  for manipulation and which is also allowed within
                  Google Scholar. We present numerous results on
                  computational complexity (from linear-time algorithms
                  to parameterized hardness results) and empirically
                  indicate that at least small improvements of the
                  h-index by splitting merged articles are easily
                  achievable.},
  volume = {285},
  pages = {895--903},
  series = {Frontiers in Artificial Intelligence and Applications},
  keywords = {graph modification, NP-hard, parameterized complexity}
}
@unpublished{BPxx,
  title = {A fixed-parameter algorithm for a routing open shop
                  problem: unit processing times, few machines and
                  locations},
  author = {René van Bevern and Artem V. Pyatkin},
  url = {http://arxiv.org/abs/1603.01191v3},
  slides = {http://rvb.su/pdf/routing-open-shop-csr16.pdf},
  year = 2017,
  date = {2017-04-27},
  abstract = {The open shop problem is to find a minimum makespan
                  schedule to process each job J_i on each machine M_q
                  for p_iq time such that, at any time, each machine
                  processes at most one job and each job is processed
                  by at most one machine. We study a problem variant
                  in which the jobs are located in the vertices of an
                  edge-weighted graph. The weights determine the time
                  needed for the machines to travel between jobs in
                  different vertices. We show that the problem with m
                  machines and n unit-time jobs in g vertices is
                  solvable in 2^O(gm^2 log(gm)) + O(mn log n) time.},
  keywords = {NP-hard, parameterized complexity, routing,
                  scheduling}
}
@article{BBB+17,
  title = {Partitioning perfect graphs into stars},
  author = {René van Bevern and Robert Bredereck and Laurent
                  Bulteau and Jiehua Chen and Vincent Froese and Rolf
                  Niedermeier and Gerhard J. Woeginger},
  url = {http://arxiv.org/abs/1402.2589v3},
  doi = {10.1002/jgt.22062},
  year = 2017,
  date = {2017-06-01},
  journal = {Journal of Graph Theory},
  abstract = {The partition of graphs into nice subgraphs is a
                  central algorithmic problem with strong ties to
                  matching theory. We study the partitioning of
                  undirected graphs into stars of the same size, a
                  problem known to be NP-complete even for the case of
                  stars on three vertices. We perform a thorough
                  computational complexity study of the problem on
                  subclasses of perfect graphs and identify several
                  polynomial-time solvable cases, for example, on
                  interval graphs and bipartite permutation graphs,
                  and also NP-complete cases, for example, on grid
                  graphs and chordal graphs.},
  volume = 85,
  number = 2,
  pages = {297--335},
  keywords = {algorithmic graph theory}
}
@conference{BFK16,
  title = {Parameterizing edge modification problems above
                  lower bounds},
  author = {René van Bevern and Vincent Froese and Christian
                  Komusiewicz},
  url = {http://arxiv.org/abs/1512.04047v1},
  doi = {10.1007/978-3-319-34171-2_5},
  slides = {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},
  isbn = {978-3-319-34170-5},
  year = {2016},
  date = {2016-05-31},
  booktitle = {Proceedings of the 11th International Computer
                  Science Symposium in Russia (CSR'16),
                  St. Petersburg, Russian Federation},
  volume = {9691},
  pages = {57--72},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {For a fixed graph F, we study the parameterized
                  complexity of a variant of the F-Free Editing
                  problem: Given a graph G and a natural number k, is
                  it possible to modify at most k edges in G so that
                  the resulting graph contains no induced subgraph
                  isomorphic to F? In our variant, the input
                  additionally contains a vertex-disjoint packing H of
                  induced subgraphs of G, which provides a lower bound
                  h(H) on the number of edge modifications required to
                  transform G into an F-free graph. While earlier
                  works used the number k as parameter or structural
                  parameters of the input graph G, we consider the
                  parameter l:=k-h(H) instead, that is, the number of
                  edge modifications above the lower bound h(H). We
                  show fixed-parameter tractability with respect to l
                  for K_3-Free Editing, Feedback Arc Set in
                  Tournaments, and Cluster Editing when the packing H
                  contains subgraphs with bounded modification
                  cost. For K_3-Free Editing, we also prove NP-hardness
                  in case of edge-disjoint packings of K_3s and l=0,
                  while for K_q-Free Editing and q≥6, NP-hardness for
                  l=0 even holds for vertex-disjoint packings of K_qs.},
  keywords = {graph modification, parameterized complexity},
  note = {The final version of this article appeared in the
                  CSR'16 special issue of Theory of Computing Systems}
}
@conference{BP16,
  title = {Completing partial schedules for Open Shop with unit
                  processing times and routing},
  author = {René van Bevern and Artem V. Pyatkin},
  url = {http://arxiv.org/abs/1603.01191v1},
  doi = {10.1007/978-3-319-34171-2_6},
  slides = {http://rvb.su/pdf/routing-open-shop-csr16.pdf},
  isbn = {978-3-319-34170-5},
  year = {2016},
  date = {2016-05-31},
  booktitle = {Proceedings of the 11th International Computer Science
                  Symposium in Russia (CSR'16), St. Petersburg, Russian
                  Federation},
  volume = {9691},
  pages = {73--87},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {Open Shop is a classical scheduling problem: given a
                  set J of jobs and a set M of machines, find a
                  minimum-makespan schedule to process each job J_i∈J on
                  each machine M_q∈M for a given amount p_iq of time
                  such that each machine processes only one job at a
                  time and each job is processed by only one machine at
                  a time. In Routing Open Shop, the jobs are located in
                  the vertices of an edge-weighted graph G=(V,E) whose
                  edge weights determine the time needed for the
                  machines to travel between jobs. Routing Open Shop is
                  NP-hard for |V|=|M|=2. For the special case of unit
                  processing times p_iq=1, we exploit Galvin's theorem
                  about list-coloring edges of bipartite graphs to prove
                  a theorem that gives a sufficient condition for the
                  completability of partial schedules. Exploiting this
                  schedule completion theorem and integer linear
                  programming, we show that Routing Open Shop with unit
                  processing times is solvable in 2^O(|V||M|^2
                  log(|V||M|)) poly(|J|) time, that is, fixed-parameter
                  tractable parameterized by |V|+|M|. Various upper
                  bounds shown using the schedule completion theorem
                  suggest that it is likewise beneficial for the
                  development of approximation algorithms.},
  keywords = {NP-hard, parameterized complexity, routing,
                  scheduling}
}
@article{FBNS16,
  title = {Exploiting hidden structure in selecting dimensions
                  that distinguish vectors},
  author = {Vincent Froese and René van Bevern and Rolf
                  Niedermeier and Manuel Sorge},
  url = {http://arxiv.org/abs/1512.01150v2},
  doi = {10.1016/j.jcss.2015.11.011},
  issn = {0022-0000},
  year = 2016,
  date = {2016-05-01},
  journal = {Journal of Computer and System Sciences},
  volume = 82,
  number = 3,
  pages = {521--535},
  abstract = {The NP-hard Distinct Vectors problem asks to delete as
                  many columns as possible from a matrix such that all
                  rows in the resulting matrix are still pairwise
                  distinct. Our main result is that, for binary
                  matrices, there is a complexity dichotomy for Distinct
                  Vectors based on the maximum (H) and the minimum (h)
                  pairwise Hamming distance between matrix rows:
                  Distinct Vectors can be solved in polynomial time if
                  H≤2⌈h/2⌉+1, and is NP-complete otherwise. Moreover, we
                  explore connections of Distinct Vectors to hitting
                  sets, thereby providing several fixed-parameter
                  tractability and intractability results also for
                  general matrices.}
}
@article{BNS17,
  title = {A parameterized complexity view on non-preemptively
                  scheduling interval-constrained jobs: few machines,
                  small looseness, and small slack},
  author = {René van Bevern and Rolf Niedermeier and Ondřej
                  Suchý},
  url = {http://arxiv.org/abs/1508.01657v2},
  doi = {10.1007/s10951-016-0478-9},
  slides = {http://rvb.su/pdf/looseness-slack-door16.pdf},
  issn = {1094-6136},
  year = 2017,
  date = {2017-06-01},
  journal = {Journal of Scheduling},
  publisher = {Springer},
  abstract = {We study the problem of non-preemptively scheduling
                  n jobs, each job j with a release time t_j, a
                  deadline d_j, and a processing time p_j, on m
                  parallel identical machines.  Cieliebak et
                  al. considered the two constraints |d_j-t_j| ≤ λp_j
                  and |d_j-t_j| ≤ p_j+σ and showed the problem to be
                  NP-hard for any λ>1 and for any σ≥2.  We complement
                  their results by parameterized complexity studies:
                  we show that, for any λ>1, the problem remains
                  weakly NP-hard even for m=2 and strongly W[1]-hard
                  parameterized by m.  We present a
                  pseudo-polynomial-time algorithm for constant m and
                  λ and a fixed-parameter tractability result for the
                  parameter m combined with σ.},
  volume = 20,
  number = 3,
  pages = {255--265}
}
@article{BCH+15,
  title = {Approximability and parameterized complexity of
                  multicover by $c$-intervals},
  author = {René van Bevern and Jiehua Chen and Falk Hüffner and
                  Stefan Kratsch and Nimrod Talmon and Gerhard
                  J. Woeginger},
  url = {https://fpt.akt.tu-berlin.de/publications/c-interval-cover-IPL.pdf},
  doi = {10.1016/j.ipl.2015.03.004},
  issn = {0020-0190},
  year = 2015,
  date = {2015-10-01},
  journal = {Information Processing Letters},
  volume = 115,
  number = 10,
  pages = {744--749},
  abstract = {A c-interval is the disjoint union of c intervals over
                  N. The c-Interval Multicover problem is the special
                  case of Set Multicover where all sets available for
                  covering are c-intervals. We strengthen known
                  APX-hardness results for c-Interval Multicover, show
                  W[1]-hardness when parameterized by the solution size,
                  and present fixed-parameter algorithms for alternative
                  parameterizations.},
  keywords = {scheduling}
}
@article{BMNW15,
  title = {Interval scheduling and colorful independent sets},
  author = {René van Bevern and Matthias Mnich and Rolf
                  Niedermeier and Mathias Weller},
  url = {http://arxiv.org/abs/1402.0851v2},
  doi = {10.1007/s10951-014-0398-5},
  sourcecode = {http://fpt.akt.tu-berlin.de/cis},
  issn = {1094-6136},
  year = 2015,
  date = {2015-10-01},
  journal = {Journal of Scheduling},
  volume = 18,
  number = 5,
  pages = {449--469},
  publisher = {Springer},
  abstract = {Numerous applications in scheduling, such as resource
                  allocation or steel manufacturing, can be modeled
                  using the NP-hard Independent Set problem (given an
                  undirected graph and an integer k, find a set of at
                  least k pairwise non-adjacent vertices). Here, one
                  encounters special graph classes like 2-union graphs
                  (edge-wise unions of two interval graphs) and strip
                  graphs (edge-wise unions of an interval graph and a
                  cluster graph), on which Independent Set remains
                  NP-hard but admits constant ratio approximations in
                  polynomial time. We study the parameterized complexity
                  of Independent Set on 2-union graphs and on subclasses
                  like strip graphs. Our investigations significantly
                  benefit from a new structural “compactness” parameter
                  of interval graphs and novel problem formulations
                  using vertex-colored interval graphs. Our main
                  contributions are as follows: 1. We show a complexity
                  dichotomy: restricted to graph classes closed under
                  induced subgraphs and disjoint unions, Independent Set
                  is polynomial-time solvable if both input interval
                  graphs are cluster graphs, and is NP-hard
                  otherwise. 2. We chart the possibilities and limits of
                  effective polynomial-time preprocessing (also known as
                  kernelization). 3. We extend the fixed-parameter
                  algorithm of Halldórsson and Karlsson (2006) for
                  Independent Set on strip graphs parameterized by the
                  structural parameter “maximum number of live jobs” to
                  show that
                  the problem (also known as Job Interval Selection) is
                  fixed-parameter tractable with respect to the
                  parameter k and generalize their algorithm from strip
                  graphs to 2-union graphs. Preliminary experiments with
                  random data indicate that Job Interval Selection with
                  up to 15 jobs and 5⋅10^5 intervals can be solved
                  optimally in less than 5 min.},
  keywords = {scheduling}
}
@conference{BKS15,
  title = {Approximation Algorithms for Mixed, Windy, and
                  Capacitated Arc Routing Problems},
  author = {René van Bevern and Christian Komusiewicz and Manuel
                  Sorge},
  url = {http://drops.dagstuhl.de/opus/volltexte/2015/5457},
  doi = {10.4230/OASIcs.ATMOS.2015.130},
  slides = {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},
  sourcecode = {https://gitlab.com/rvb/mwcarp-approx},
  longversion = {BKSxx.html},
  issn = {2190-6807},
  year = {2015},
  date = {2015-09-17},
  booktitle = {Proceedings of the 15th Workshop on Algorithmic
                  Approaches for Transportation Modelling,
                  Optimization, and Systems (ATMOS'15), Patras,
                  Greece},
  volume = {48},
  pages = {130--143},
  publisher = {Schloss Dagstuhl--Leibniz-Zentrum für Informatik},
  series = {OpenAccess Series in Informatics (OASIcs)},
  abstract = {We show that any alpha(n)-approximation algorithm
                  for the n-vertex metric asymmetric Traveling
                  Salesperson problem yields O(alpha(C))-approximation
                  algorithms for various mixed, windy, and capacitated
                  arc routing problems. Herein, C is the number of
                  weakly-connected components in the subgraph induced
                  by the positive-demand arcs, a number that can be
                  expected to be small in applications. In conjunction
                  with known results, we derive constant-factor
                  approximations if C is in O(log n) and
                  O(log(C)/log(log(C)))-approximations in general.},
  note = {Best paper award.  The final version of this article
                  appeared in the WARP 2 special issue of Networks.},
  keywords = {routing}
}
@article{BFSS15,
  title = {On the Parameterized Complexity of Computing Balanced
                  Partitions in Graphs},
  author = {René van Bevern and Andreas Emil Feldmann and Manuel
                  Sorge and Ondřej Suchý},
  url = {http://arxiv.org/abs/1312.7014v2},
  doi = {10.1007/s00224-014-9557-5},
  slides = {http://rvb.su/pdf/bipartition.pdf},
  issn = {1432-4350},
  year = 2015,
  date = {2015-07-08},
  journal = {Theory of Computing Systems},
  volume = 57,
  number = 1,
  pages = {1--35},
  publisher = {Springer},
  abstract = {A balanced partition is a clustering of a graph into a
                  given number of equal-sized parts. For instance, the
                  Bisection problem asks to remove at most k edges in
                  order to partition the vertices into two equal-sized
                  parts. We prove that Bisection is FPT for the distance
                  to constant cliquewidth if we are given the deletion
                  set. This implies FPT algorithms for some well-studied
                  parameters such as cluster vertex deletion number and
                  feedback vertex set. However, we show that Bisection
                  does not admit polynomial-size kernels for these
                  parameters. For the Vertex Bisection problem, vertices
                  need to be removed in order to obtain two equal-sized
                  parts. We show that this problem is FPT for the number
                  of removed vertices k if the solution cuts the graph
                  into a constant number c of connected components. The
                  latter condition is unavoidable, since we also prove
                  that Vertex Bisection is W[1]-hard w.r.t. (k,c). Our
                  algorithms for finding bisections can easily be
                  adapted to finding partitions into d equal-sized
                  parts, which entails additional running time factors
                  of n^O(d). We show that a substantial speed-up is
                  unlikely since the corresponding task is W[1]-hard
                  w.r.t. d, even on forests of maximum degree two. We
                  can, however, show that it is FPT for the vertex cover
                  number.},
  keywords = {graph modification}
}
@unpublished{BKK+xx,
  title = {Well-Formed Separator Sequences, with an Application
                  to Hypergraph Drawing},
  author = {René van Bevern and Iyad Kanj and Christian
                  Komusiewicz and Rolf Niedermeier and Manuel Sorge},
  url = {http://arxiv.org/abs/1507.02350},
  year = 2015,
  date = {2015-07-01},
  abstract = {Given a hypergraph H, the Planar Support problem asks
                  whether there is a planar graph G on the same vertex
                  set as H such that each hyperedge induces a connected
                  subgraph of G. Planar Support is motivated by
                  applications in graph drawing and data
                  visualization. We show that Planar Support is
                  fixed-parameter tractable when parameterized by the
                  number of hyperedges in the input hypergraph and the
                  outerplanarity number of the sought planar graph. To
                  this end, we develop novel structural results for
                  r-outerplanar triangulated disks, showing that they
                  admit sequences of separators with structural
                  properties enabling data reduction. This allows us to
                  obtain a problem kernel for Planar Support, thus
                  showing its fixed-parameter tractability.},
  keywords = {algorithmic graph theory, hypergraphs}
}
@conference{BKN+15,
  title = {H-Index Manipulation by Merging Articles: Models,
                  Theory, and Experiments},
  author = {René van Bevern and Christian Komusiewicz and Rolf
                  Niedermeier and Manuel Sorge and Toby Walsh},
  url = {http://ijcai.org/papers15/Papers/IJCAI15-119.pdf},
  longversion = {BKN+16.html},
  isbn = {978-1-57735-738-4},
  year = {2015},
  date = {2015-07-01},
  booktitle = {Proceedings of the 24th International Joint Conference
                  on Artificial Intelligence (IJCAI'15), Buenos Aires,
                  Argentina},
  pages = {808--814},
  publisher = {AAAI Press},
  abstract = {An author's profile on Google Scholar consists of
                  indexed articles and associated data, such as the
                  number of citations and the H-index. The author is
                  allowed to merge articles, which may affect the
                  H-index. We analyze the parameterized complexity of
                  maximizing the H-index using article merges. Herein,
                  to model realistic manipulation scenarios, we define a
                  compatibility graph whose edges correspond to
                  plausible merges. Moreover, we consider multiple
                  possible measures for computing the citation count of
                  a merged article. For the measure used by Google
                  Scholar, we give an algorithm that maximizes the
                  H-index in linear time if the compatibility graph has
                  constant-size connected components. In contrast, if we
                  allow merging arbitrary articles, then already
                  increasing the H-index by one is NP-hard. Experiments
                  on Google Scholar profiles of AI researchers show that
                  the H-index can be manipulated substantially only by
                  merging articles with highly dissimilar titles, which
                  would be easy to discover.},
  keywords = {network analysis},
  note = {The final version of this article appeared in
                  Artificial Intelligence}
}
@article{BBC+15,
  title = {Network-Based Vertex Dissolution},
  author = {René van Bevern and Robert Bredereck and Jiehua Chen
                  and Vincent Froese and Rolf Niedermeier and Gerhard
                  J. Woeginger},
  url = {http://arxiv.org/abs/1402.2664v3},
  doi = {10.1137/140978880},
  year = 2015,
  date = {2015-05-01},
  journal = {SIAM Journal on Discrete Mathematics},
  volume = 29,
  number = 2,
  pages = {888--914},
  abstract = {We introduce a graph-theoretic vertex dissolution
                  model that applies to a number of redistribution
                  scenarios, such as gerrymandering in political
                  districting or work balancing in an online
                  situation. The central aspect of our model is the
                  deletion of certain vertices and the redistribution of
                  their load to neighboring vertices in a completely
                  balanced way. We investigate how the underlying graph
                  structure, the knowledge of which vertices should be
                  deleted, and the relation between old and new vertex
                  loads influence the computational complexity of the
                  underlying graph problems. Our results establish a
                  clear borderline between tractable and intractable
                  cases.},
  keywords = {network analysis}
}
@article{BDF+15,
  title = {Myhill-Nerode Methods for Hypergraphs},
  author = {René van Bevern and Rodney G. Downey and Michael
                  R. Fellows and Serge Gaspers and Frances A. Rosamond},
  url = {http://arxiv.org/abs/1211.1299v5},
  doi = {10.1007/s00453-015-9977-x},
  slides = {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},
  issn = {0178-4617},
  year = 2015,
  date = {2015-02-27},
  journal = {Algorithmica},
  volume = 73,
  number = 4,
  pages = {696--729},
  publisher = {Springer},
  abstract = {We give an analog of the Myhill–Nerode theorem from
                  formal language theory for hypergraphs and use it to
                  derive the following results for two NP-hard
                  hypergraph problems. (1) We provide an algorithm for
                  testing whether a hypergraph has cutwidth at most k
                  that runs in linear time for constant k. In terms of
                  parameterized complexity theory, the problem is
                  fixed-parameter linear parameterized by k. (2) We show
                  that it is not expressible in monadic second-order
                  logic whether a hypergraph has bounded (fractional,
                  generalized) hypertree width. The proof leads us to
                  conjecture that, in terms of parameterized complexity
                  theory, these problems are W[1]-hard parameterized by
                  the incidence treewidth (the treewidth of the
                  incidence graph). Thus, in the form of the
                  Myhill–Nerode theorem for hypergraphs, we obtain a
                  method to derive linear-time algorithms and to obtain
                  indicators for intractability for hypergraph problems
                  parameterized by incidence treewidth.},
  note = {ISAAC'13 special issue},
  keywords = {hypergraphs, invited}
}
@incollection{BNSW14,
  title = {Complexity of arc routing problems},
  author = {René van Bevern and Rolf Niedermeier and Manuel Sorge
                  and Mathias Weller},
  editor = {Ángel Corberán and Gilbert Laporte},
  doi = {10.1137/1.9781611973679.ch2},
  year = 2014,
  date = {2014-12-01},
  booktitle = {Arc Routing: Problems, Methods, and Applications},
  publisher = {SIAM},
  chapter = 2,
  keywords = {routing},
  note = {The linked PDF file is only a preview containing the
                  first page and references.  The full text can be read
                  on Google Books.},
  url = {http://rvb.su/pdf/arcrouting-preview.pdf}
}
@article{BBC+17,
  title = {Fixed-parameter algorithms for {DAG} Partitioning},
  journal = {Discrete Applied Mathematics},
  volume = 220,
  pages = {134--160},
  year = 2017,
  issn = {0166-218X},
  doi = {10.1016/j.dam.2016.12.002},
  author = {René van Bevern and Robert Bredereck and Morgan
                  Chopin and Sepp Hartung and Falk Hüffner and André
                  Nichterlein and Ondřej Suchý},
  keywords = {NP-hard problem, graph algorithms, polynomial-time
                  data reduction, multiway cut, linear-time algorithms,
                  algorithm engineering, evaluating heuristics},
  abstract = {Finding the origin of short phrases propagating
                  through the web has been formalized by Leskovec et
                  al. (2009) as DAG Partitioning: given an arc-weighted
                  directed acyclic graph on n vertices and m arcs,
                  delete arcs with total weight at most k such that
                  each resulting weakly-connected component contains
                  exactly one sink—a vertex without outgoing arcs. DAG
                  Partitioning is NP-hard. We show an algorithm to
                  solve DAG Partitioning in O(2^k · (n + m)) time, that
                  is, in linear time for fixed k. We complement it with
                  linear-time executable data reduction rules. Our
                  experiments show that, in combination, they can
                  optimally solve DAG Partitioning on simulated
                  citation networks within five minutes for k ≤ 190 and
                  m being 10^7 and larger. We use our obtained optimal
                  solutions to evaluate the solution quality of
                  Leskovec et al.’s heuristic. We show that Leskovec et
                  al.’s heuristic works optimally on trees and
                  generalize this result by showing that DAG
                  Partitioning is solvable in 2^O(t^2) · n time if a
                  width-t tree decomposition of the input graph is
                  given. Thus, we improve an algorithm and answer an
                  open question of Alamdari and Mehrabian (2012). We
                  complement our algorithms by lower bounds on the
                  running time of exact algorithms and on the
                  effectiveness of data reduction.},
  url = {https://arxiv.org/abs/1611.08809}
}
@phdthesis{Bev14,
  title = {Fixed-Parameter Linear-Time Algorithms for NP-hard
                  Graph and Hypergraph Problems Arising in Industrial
                  Applications},
  author = {René van Bevern},
  doi = {10.14279/depositonce-4131},
  slides = {http://rvb.su/pdf/fixed-parameter-linear-time-slides.pdf},
  isbn = {978-3-7983-2706-1},
  year = 2014,
  date = {2014-10-01},
  publisher = {Universitätsverlag der TU Berlin},
  school = {Fakultät IV -- Elektrotechnik und Informatik, TU
                  Berlin},
  abstract = {This thesis aims for the development of efficient
                  algorithms to exactly solve four selected NP-hard
                  graph and hypergraph problems arising in the fields of
                  scheduling, steel manufacturing, software engineering,
                  radio frequency allocation, computer-aided circuit
                  design, and social network analysis. NP-hard problems
                  presumably cannot be solved exactly in a running time
                  growing only polynomially with the input size. In
                  order to still solve the considered problems
                  efficiently, this thesis develops linear-time data
                  reduction and fixed-parameter linear-time
                  algorithms—algorithms that can be proven to run in
                  linear time if certain parameters of the problem
                  instances are constant. Besides proving linear
                  worst-case running times, the efficiency of most of
                  the developed algorithms is evaluated
                  experimentally. Moreover, the limits of
                  fixed-parameter linear-time algorithms and provably
                  efficient and effective data reduction are shown.},
  note = {PhD thesis}
}
@article{Bev14b,
  title = {Towards Optimal and Expressive Kernelization for
                  $d$-Hitting Set},
  author = {René van Bevern},
  url = {http://arxiv.org/abs/1112.2310v3},
  doi = {10.1007/s00453-013-9774-3},
  slides = {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},
  sourcecode = {http://fpt.akt.tu-berlin.de/hslinkern/},
  issn = {0178-4617},
  year = 2014,
  date = {2014-09-01},
  journal = {Algorithmica},
  volume = 70,
  number = 1,
  pages = {129--147},
  publisher = {Springer},
  abstract = {A sunflower in a hypergraph is a set of hyperedges
                  pairwise intersecting in exactly the same vertex
                  set. Sunflowers are a useful tool in polynomial-time
                  data reduction for problems formalizable as d-Hitting
                  Set, the problem of covering all hyperedges (whose
                  cardinality is bounded from above by a constant d) of
                  a hypergraph by at most k vertices. Additionally, in
                  fault diagnosis, sunflowers yield concise explanations
                  for “highly defective structures”. We provide a
                  linear-time algorithm that, by finding sunflowers,
                  transforms an instance of d-Hitting Set into an
                  equivalent instance comprising at most O(k^d)
                  hyperedges and vertices. In terms of parameterized
                  complexity, we show a problem kernel with
                  asymptotically optimal size (unless coNP⊆NP/poly) and
                  provide experimental results that show the practical
                  applicability of our algorithm. Finally, we show that
                  the number of vertices can be reduced to O(k^(d−1))
                  with additional processing in O(k^(1.5d))
                  time—nontrivially combining the sunflower technique
                  with problem kernels due to Abu-Khzam and Moser.},
  note = {COCOON'12 special issue},
  keywords = {hypergraphs, invited}
}
@conference{BBC+14b,
  title = {Network-Based Dissolution},
  author = {René van Bevern and Robert Bredereck and Jiehua Chen
                  and Vincent Froese and Rolf Niedermeier and Gerhard
                  J. Woeginger},
  doi = {10.1007/978-3-662-44465-8_7},
  isbn = {978-3-662-44464-1},
  year = {2014},
  date = {2014-08-25},
  booktitle = {Proceedings of the 39th International Symposium on
                  Mathematical Foundations of Computer Science
                  (MFCS'14), Budapest, Hungary},
  volume = {8635},
  pages = {69--80},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {We introduce a graph-theoretic dissolution model that
                  applies to a number of redistribution scenarios such
                  as gerrymandering in political districting or work
                  balancing in an online situation. The central aspect
                  of our model is the deletion of certain vertices and
                  the redistribution of their loads to neighboring
                  vertices in a perfectly balanced way. We investigate
                  how the underlying graph structure, the pre-knowledge
                  of which vertices should be deleted, and the relation
                  between old and new vertex loads influence the
                  computational complexity of the underlying graph
                  problems. Our results establish a clear borderline
                  between tractable and intractable cases.},
  note = {The final version of this article appeared in SIAM
                  Journal on Discrete Mathematics},
  longversion = {BBC+15.html},
  keywords = {network analysis}
}
@conference{BBB+14,
  title = {Star Partitions of Perfect Graphs},
  author = {René van Bevern and Robert Bredereck and Laurent
                  Bulteau and Jiehua Chen and Vincent Froese and Rolf
                  Niedermeier and Gerhard J. Woeginger},
  doi = {10.1007/978-3-662-43948-7_15},
  isbn = {978-3-662-43947-0},
  year = {2014},
  date = {2014-07-01},
  booktitle = {Proceedings of the 41st International Colloquium on
                  Automata, Languages, and Programming (ICALP'14),
                  Copenhagen, Denmark},
  volume = {8572},
  pages = {174--185},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {The partition of graphs into nice subgraphs is a
                  central algorithmic problem with strong ties to
                  matching theory. We study the partitioning of
                  undirected graphs into stars, a problem known to be
                  NP-complete even for the case of stars on three
                  vertices. We perform a thorough computational
                  complexity study of the problem on subclasses of
                  perfect graphs and identify several polynomial-time
                  solvable cases, for example, on interval graphs and
                  bipartite permutation graphs, and also NP-hard cases,
                  for example, on grid graphs and chordal graphs.},
  note = {The final version of this article appeared in Journal
                  of Graph Theory},
  longversion = {BBB+17.html},
  keywords = {algorithmic graph theory}
}
@article{BHNS14,
  title = {Constant-factor approximations for Capacitated Arc
                  Routing without triangle inequality},
  author = {René van Bevern and Sepp Hartung and André Nichterlein
                  and Manuel Sorge},
  url = {http://arxiv.org/abs/1404.3660v1},
  doi = {10.1016/j.orl.2014.05.002},
  issn = {0167-6377},
  year = 2014,
  date = {2014-06-01},
  journal = {Operations Research Letters},
  volume = 42,
  number = 4,
  pages = {290--292},
  abstract = {Given an undirected graph with edge costs and
                  edge demands, the Capacitated Arc Routing problem
                  (CARP) asks for minimum-cost routes for equal-capacity
                  vehicles so as to satisfy all demands. Constant-factor
                  polynomial-time approximation algorithms were proposed
                  for CARP with triangle inequality, while CARP was
                  claimed to be NP-hard to approximate within any
                  constant factor in general. Correcting this claim, we
                  show that any factor α approximation for CARP with
                  triangle inequality yields a factor α approximation
                  for the general CARP.},
  keywords = {routing, scheduling}
}
@conference{BFGR13,
  title = {Myhill-Nerode Methods for Hypergraphs},
  author = {René van Bevern and Michael R. Fellows and Serge
                  Gaspers and Frances A. Rosamond},
  doi = {10.1007/978-3-642-45030-3_35},
  slides = {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},
  isbn = {978-3-642-45029-7},
  year = {2013},
  date = {2013-12-13},
  booktitle = {Proceedings of the 24th International Symposium on
                  Algorithms and Computation (ISAAC'13), Hong Kong,
                  China},
  volume = {8283},
  pages = {372--382},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {We introduce a method of applying Myhill-Nerode
                  methods from formal language theory to hypergraphs and
                  show how this method can be used to obtain the
                  following parameterized complexity results. Hypergraph
                  Cutwidth (deciding whether a hypergraph on n vertices
                  has cutwidth at most k) is linear-time solvable for
                  constant k. For hypergraphs of constant incidence
                  treewidth (treewidth of the incidence graph),
                  Hypertree Width and variants cannot be solved by
                  simple finite tree automata. The proof leads us to
                  conjecture that Hypertree Width is W[1]-hard for this
                  parameter.},
  note = {The final version of this article appeared in the
                  ISAAC'13 special issue of Algorithmica},
  longversion = {BDF+15.html},
  keywords = {hypergraphs}
}
@conference{FBNS13,
  title = {A Parameterized Complexity Analysis of Combinatorial
                  Feature Selection Problems},
  author = {Vincent Froese and René van Bevern and Rolf
                  Niedermeier and Manuel Sorge},
  doi = {10.1007/978-3-642-40313-2_40},
  isbn = {978-3-642-40312-5},
  year = {2013},
  date = {2013-08-26},
  booktitle = {Proceedings of the 38th International Symposium on
                  Mathematical Foundations of Computer Science
                  (MFCS'13), Klosterneuburg, Austria},
  volume = {8087},
  pages = {445-456},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {We examine the algorithmic tractability of NP-hard
                  combinatorial feature selection problems in terms of
                  parameterized complexity theory. In combinatorial
                  feature selection, one seeks to discard dimensions
                  from high-dimensional data such that the resulting
                  instances fulfill a desired property. In parameterized
                  complexity analysis, one seeks to identify relevant
                  problem-specific quantities and tries to determine
                  their influence on the computational complexity of the
                  considered problem. In this paper, for various
                  combinatorial feature selection problems, we identify
                  parameterizations and reveal to what extent these
                  govern computational complexity. We provide
                  tractability as well as intractability results; for
                  example, we show that the Distinct Vectors problem on
                  binary points is polynomial-time solvable if each pair
                  of points differs in at most three dimensions, whereas
                  it is NP-hard otherwise.},
  note = {The final version of this article appeared in Journal
                  of Computer and System Sciences},
  longversion = {FBNS16.html}
}
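
To make the Distinct Vectors problem from the abstract above
concrete, here is a tiny brute-force Python sketch (ours, exponential
in the number of dimensions, so for illustration only): it finds a
smallest set of dimensions to keep such that the given binary points
remain pairwise distinct.

  from itertools import combinations

  def distinct_vectors_brute_force(points):
      # points: pairwise distinct binary tuples of equal length.
      d = len(points[0])
      for size in range(d + 1):
          for keep in combinations(range(d), size):
              projected = {tuple(p[i] for i in keep) for p in points}
              if len(projected) == len(points):   # still pairwise distinct
                  return set(keep)

  # Example: no single dimension separates these points, but two do.
  print(distinct_vectors_brute_force([(0, 0, 1), (0, 1, 0), (1, 0, 0)]))  # -> {0, 1}
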
@conference{BFSS13,
  title = {On the Parameterized Complexity of Computing Graph
                  Bisections},
  author = {René van Bevern and Andreas Emil Feldmann and Manuel
                  Sorge and Ondřej Suchý},
  doi = {10.1007/978-3-642-45043-3_8},
  isbn = {978-3-642-45042-6},
  year = {2013},
  date = {2013-06-19},
  booktitle = {Proceedings of the 39th International Workshop on
                  Graph-Theoretic Concepts in Computer Science (WG'13),
                  Lübeck, Germany},
  volume = {8165},
  pages = {76-87},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {The Bisection problem asks for a partition of the
                  vertices of a graph into two equally sized sets, while
                  minimizing the cut size. This is the number of edges
                  connecting the two vertex sets. Bisection has been
                  thoroughly studied in the past. However, only few
                  results have been published that consider the
                  parameterized complexity of this problem. We show that
                  Bisection is FPT w.r.t. the minimum cut size if there
                  is an optimum bisection that cuts the graph into a given
                  constant number of connected components. Our algorithm
                  applies to the more general Balanced Biseparator
                  problem where vertices need to be removed instead of
                  edges. We prove that this problem is W[1]-hard
                  w.r.t. the minimum cut size and the number of cut out
                  components. For Bisection we further show that no
                  polynomial-size kernels exist for the cut size
                  parameter. In fact, we show this for all parameters
                  that are polynomial in the input size and that do not
                  increase when taking disjoint unions of graphs. We
                  prove fixed-parameter tractability for the distance to
                  constant cliquewidth if we are given the deletion
                  set. This implies fixed-parameter algorithms for some
                  well-studied parameters such as cluster vertex
                  deletion number and feedback vertex set.},
  note = {The final version of this article appeared in Theory
                  of Computing Systems},
  slides = {http://rvb.su/pdf/bipartition.pdf},
  longversion = {BFSS15.html},
  keywords = {graph modification}
}
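
For reference, the Python sketch below (ours, a solution checker, not
the paper's algorithm) evaluates a candidate bisection as defined in
the abstract above: the two vertex sets must be equally sized (up to
one vertex for odd n, a convention we assume here), and the cut size
is the number of edges between them.

  def bisection_cut_size(n, edges, side):
      # side maps each vertex 0..n-1 to 0 or 1.
      ones = sum(side[v] for v in range(n))
      if abs(n - 2 * ones) > 1:           # partition is not balanced
          return None
      return sum(1 for u, v in edges if side[u] != side[v])

  # Example: splitting a 4-cycle into opposite pairs cuts all 4 edges.
  print(bisection_cut_size(4, [(0, 1), (1, 2), (2, 3), (3, 0)],
                           {0: 0, 1: 1, 2: 0, 3: 1}))  # -> 4
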
@conference{BBC+13,
  title = {Parameterized Complexity of {DAG} Partitioning},
  author = {René van Bevern and Robert Bredereck and Morgan
                  Chopin and Sepp Hartung and Falk Hüffner and André
                  Nichterlein and Ondřej Suchý},
  url = {http://www.user.tu-berlin.de/hueffner/dag-partitioning-ciac13.pdf},
  doi = {10.1007/978-3-642-38233-8_5},
  isbn = {978-3-642-38232-1},
  year = {2013},
  date = {2013-05-22},
  booktitle = {Proceedings of the 8th International Conference on
                  Algorithms and Complexity (CIAC'13), Barcelona,
                  Spain},
  volume = {7878},
  pages = {49-60},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {The goal of tracking the origin of short,
                  distinctive phrases (memes) that propagate through
                  the web in reaction to current events has been
                  formalized as DAG Partitioning: given a directed
                  acyclic graph, delete edges of minimum weight such
                  that each resulting connected component of the
                  underlying undirected graph contains only one
                  sink. Motivated by NP-hardness and hardness of
                  approximation results, we consider the parameterized
                  complexity of this problem. We show that it can be
                  solved in O(2^k·n^2) time, where k is the number of
                  edge deletions, proving fixed-parameter tractability
                  for parameter k. We then show that unless the
                  Exponential Time Hypothesis (ETH) fails, this cannot
                  be improved to 2^o(k)·n^O(1); further, DAG
                  Partitioning does not have a polynomial kernel
                  unless NP ⊆ coNP/poly. Finally, given a tree
                  decomposition of width w, we show how to solve DAG
                  Partitioning in 2^O(w^2)·n time, improving a known
                  algorithm for the parameter pathwidth.},
  note = {The final version of this article, with improved
                  algorithms and an experimental evaluation, appeared
                  in Discrete Applied Mathematics},
  keywords = {graph modification, network analysis}
}
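
The Python sketch below (ours, illustrative only) verifies a
candidate solution to DAG Partitioning as defined in the abstract
above: after the given arcs are deleted, every weakly connected
component of what remains must contain exactly one sink.

  def is_valid_dag_partition(n, arcs, deleted):
      remaining = [a for a in arcs if a not in deleted]
      parent = list(range(n))             # union-find over the
      def find(x):                        # underlying undirected graph
          while parent[x] != x:
              parent[x] = parent[parent[x]]
              x = parent[x]
          return x
      outdeg = [0] * n
      for u, v in remaining:
          outdeg[u] += 1
          parent[find(u)] = find(v)
      sinks = {}
      for v in range(n):
          root = find(v)
          sinks.setdefault(root, 0)
          if outdeg[v] == 0:              # v is a sink
              sinks[root] += 1
      return all(c == 1 for c in sinks.values())

  # Example: deleting arc (1, 3) separates the two sinks 2 and 3.
  print(is_valid_dag_partition(4, [(0, 1), (1, 2), (1, 3)], {(1, 3)}))  # -> True
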
@conference{BMNW12,
  title = {Interval Scheduling and Colorful Independent Sets},
  author = {René van Bevern and Matthias Mnich and Rolf
                  Niedermeier and Mathias Weller},
  doi = {10.1007/978-3-642-35261-4_28},
  isbn = {978-3-642-35260-7},
  year = {2012},
  date = {2012-12-19},
  booktitle = {Proceedings of the 23rd International Symposium on
                  Algorithms and Computation (ISAAC'12), Taipei, Taiwan},
  volume = {7676},
  pages = {247--256},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {The NP-hard Independent Set problem is to determine
                  for a given graph G and an integer k whether G
                  contains a set of k pairwise non-adjacent
                  vertices. The problem has numerous applications in
                  scheduling, including resource allocation and steel
                  manufacturing. There, one encounters restricted graph
                  classes such as 2-union graphs, which are edge-wise
                  unions of two interval graphs on the same vertex set,
                  or strip graphs, where additionally one of the two
                  interval graphs is a disjoint union of cliques. We
                  prove NP-hardness of Independent Set on a very
                  restricted subclass of 2-union graphs and identify
                  natural parameterizations to chart the possibilities
                  and limitations of effective polynomial-time
                  preprocessing (kernelization) and fixed-parameter
                  algorithms. Our algorithms benefit from novel
                  formulations of the computational problems in terms of
                  (list-)colored interval graphs.},
  note = {The final version of this article appeared in Journal
                  of Scheduling},
  longversion = {BMNW15.html},
  keywords = {scheduling}
}
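
As background to the abstract above: on a plain interval graph, a
maximum independent set, i.e. a largest set of pairwise
non-overlapping intervals, is found by the classic
earliest-finishing-time greedy. The Python sketch below (ours)
illustrates that baseline; it does not handle the 2-union and strip
graphs for which the paper proves hardness.

  def max_independent_set_intervals(intervals):
      # Greedily take closed intervals in order of right endpoint.
      chosen, last_end = [], float("-inf")
      for left, right in sorted(intervals, key=lambda iv: iv[1]):
          if left > last_end:             # no overlap with last choice
              chosen.append((left, right))
              last_end = right
      return chosen

  # Example: the middle job overlaps both others and is skipped.
  print(max_independent_set_intervals([(1, 4), (2, 6), (5, 8)]))
  # -> [(1, 4), (5, 8)]
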
@article{SBNW12,
  title = {A new view on Rural Postman based on Eulerian
                  Extension and Matching},
  author = {Manuel Sorge and René van Bevern and Rolf Niedermeier
                  and Mathias Weller},
  url = {http://fpt.akt.tu-berlin.de/publications/A_New_View_on_Rural_Postman_Based_on_Eulerian_Extension_and_Matching-JDA.pdf},
  doi = {10.1016/j.jda.2012.04.007},
  issn = {1570-8667},
  year = 2012,
  date = {2012-10-01},
  journal = {Journal of Discrete Algorithms},
  volume = 16,
  pages = {12--33},
  abstract = {We provide a new characterization of the NP-hard arc
                  routing problem Rural Postman in terms of a
                  constrained variant of minimum-weight perfect matching
                  on bipartite graphs. To this end, we employ a
                  parameterized equivalence between Rural Postman and
                  Eulerian Extension, a natural arc addition problem in
                  directed multigraphs. We indicate the NP-hardness of
                  the introduced matching problem. In particular, we use
                  the matching problem to make partial progress towards
                  answering the open question about the parameterized
                  complexity of Rural Postman with respect to the
                  parameter “number of weakly connected components in
                  the graph induced by the required arcs”. This
                  question has been open, and largely neglected, for
                  more than thirty years, despite its significant
                  practical relevance.},
  note = {IWOCA'11 special issue},
  keywords = {graph modification, invited, routing}
}
@conference{BHK+11,
  title = {Linear-Time Computation of a Linear Problem Kernel for
                  Dominating Set on Planar Graphs},
  author = {René van Bevern and Sepp Hartung and Frank Kammer and
                  Rolf Niedermeier and Mathias Weller},
  url = {http://fpt.akt.tu-berlin.de/publications/Linear-Time_Computation_of_a_Linear_Problem_Kernel_for_Dominating_set_on_Planar_Graphs-IPEC.pdf},
  slides = {http://rvb.su/pdf/linear-time-linear-kernel-dominating-set.pdf},
  doi = {10.1007/978-3-642-28050-4_16},
  isbn = {978-3-642-28049-8},
  year = {2012},
  date = {2012-09-06},
  booktitle = {Proceedings of the 6th International Symposium on
                  Parameterized and Exact Computation (IPEC'11),
                  Saarbrücken, Germany},
  volume = {7112},
  pages = {194--206},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {We present a linear-time kernelization algorithm that
                  transforms a given planar graph G with domination
                  number γ(G) into a planar graph G' of size O(γ(G))
                  with γ(G) = γ(G'). In addition, a minimum dominating
                  set for G can be inferred from a minimum dominating
                  set for G'. In terms of parameterized algorithmics,
                  this implies a linear-size problem kernel for the
                  NP-hard Dominating Set problem on planar graphs, where
                  the kernelization takes linear time. This improves on
                  previous kernelization algorithms that provide
                  linear-size kernels in cubic time.},
  keywords = {algorithmic graph theory}
}
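
For the Dominating Set problem underlying the kernelization above, a
minimal checker in Python (ours, illustrative): every vertex must lie
in the candidate set D or have a neighbour in it; the domination
number γ(G) is the size of a smallest such D.

  def is_dominating_set(adj, D):
      # adj: dict mapping each vertex to its set of neighbours.
      D = set(D)
      return all(v in D or adj[v] & D for v in adj)

  # Example: on the path 0-1-2, the middle vertex dominates all.
  path = {0: {1}, 1: {0, 2}, 2: {1}}
  print(is_dominating_set(path, {1}))  # -> True
  print(is_dominating_set(path, {0}))  # -> False (2 is undominated)
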
@conference{Bev12,
  title = {Towards Optimal and Expressive Kernelization for
                  $d$-Hitting Set},
  author = {René van Bevern},
  doi = {10.1007/978-3-642-32241-9_11},
  slides = {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},
  isbn = {978-3-642-32240-2},
  year = {2012},
  date = {2012-08-20},
  booktitle = {Proceedings of the 18th Annual International Computing
                  and Combinatorics Conference (COCOON'12), Sydney,
                  Australia},
  volume = {7434},
  pages = {121-132},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {A sunflower in a hypergraph is a set of hyperedges
                  pairwise intersecting in exactly the same vertex
                  set. Sunflowers are a useful tool in polynomial-time
                  data reduction for problems formalizable as d-Hitting
                  Set, the problem of covering all hyperedges (of
                  cardinality at most d) of a hypergraph by at most k
                  vertices. Additionally, in fault diagnosis, sunflowers
                  yield concise explanations for “highly defective
                  structures”. We provide a linear-time algorithm that,
                  by finding sunflowers, transforms an instance of
                  d-Hitting Set into an equivalent instance comprising
                  at most O(k^d) hyperedges and vertices. In terms of
                  parameterized complexity, we show a problem kernel
                  with asymptotically optimal size (unless
                  coNP ⊆ NP/poly). We show that the number of vertices
                  can be reduced to O(k^(d−1)) with additional
                  processing in O(k^(1.5d)) time—nontrivially combining
                  the sunflower technique with problem kernels due to
                  Abu-Khzam and Moser.},
  note = {The final version of this article appeared in the
                  COCOON'12 special issue of Algorithmica},
  longversion = {Bev14b.html},
  keywords = {hypergraphs}
}
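
The sunflower notion from the start of the abstract above is easy to
state in code. The Python sketch below (ours, a checker only; the
paper's contribution is finding sunflowers in linear time, which this
does not attempt) tests whether hyperedges pairwise intersect in
exactly the same core. The classic reduction rule then observes that
a sunflower with at least k + 1 petals forces every hitting set of
size at most k to contain a vertex of the core.

  from itertools import combinations

  def is_sunflower(hyperedges):
      sets = [frozenset(e) for e in hyperedges]
      core = frozenset.intersection(*sets)    # vertices common to all
      return all((a & b) == core for a, b in combinations(sets, 2))

  # Example: three petals around the common core {1}.
  print(is_sunflower([{1, 2}, {1, 3}, {1, 4}]))  # -> True
  print(is_sunflower([{1, 2}, {2, 3}, {1, 3}]))  # -> False
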
@article{BMN12,
  title = {Approximation and Tidying---A Problem Kernel for
                  $s$-Plex Cluster Vertex Deletion},
  author = {René van Bevern and Hannes Moser and Rolf Niedermeier},
  url = {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/approximation-and-tidying.pdf},
  slides = {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},
  doi = {10.1007/s00453-011-9492-7},
  issn = {0178-4617},
  year = 2012,
  date = {2012-04-01},
  journal = {Algorithmica},
  volume = 62,
  number = {3-4},
  pages = {930-950},
  publisher = {Springer},
  abstract = {We introduce the NP-hard graph-based data clustering
                  problem s-Plex Cluster Vertex Deletion, where the task
                  is to delete at most k vertices from a graph so that
                  the connected components of the resulting graph are
                  s-plexes. In an s-plex, every vertex has an edge to
                  all but at most s−1 other vertices; cliques are
                  1-plexes. We propose a new method based on
                  “approximation and tidying” for kernelizing vertex
                  deletion problems whose goal graphs can be
                  characterized by forbidden induced subgraphs. The
                  method exploits polynomial-time approximation results
                  and thus provides a useful link between approximation
                  and kernelization. Employing “approximation and
                  tidying”, we develop data reduction rules that, in
                  O(ksn^2) time, transform an s-Plex Cluster Vertex
                  Deletion instance with n vertices into an equivalent
                  instance with O(k^2 s^3) vertices, yielding a problem
                  kernel. To this end, we also show how to exploit
                  structural properties of the specific problem in order
                  to significantly improve the running time of the
                  proposed kernelization method.},
  keywords = {graph modification}
}
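
The s-plex condition in the abstract above translates directly into a
checker: in a component with m vertices, every vertex needs at least
m - s neighbours inside the component. A minimal Python sketch
(ours):

  def components_are_s_plexes(adj, s):
      # adj: dict mapping each vertex to its set of neighbours.
      seen = set()
      for start in adj:
          if start in seen:
              continue
          comp, stack = set(), [start]    # gather one component
          while stack:
              v = stack.pop()
              if v not in comp:
                  comp.add(v)
                  stack.extend(adj[v] - comp)
          seen |= comp
          # Each vertex may miss at most s - 1 of the other vertices.
          if any(len(adj[v] & comp) < len(comp) - s for v in comp):
              return False
      return True

  # Example: a path on three vertices is a 2-plex but not a clique.
  path = {0: {1}, 1: {0, 2}, 2: {1}}
  print(components_are_s_plexes(path, 2))  # -> True
  print(components_are_s_plexes(path, 1))  # -> False
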
@article{BBF+11,
  title = {Parameterized Algorithmics for Finding Connected
                  Motifs in Biological Networks},
  author = {Nadja Betzler and René van Bevern and Michael
                  R. Fellows and Christian Komusiewicz and Rolf
                  Niedermeier},
  url = {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/finding-connected-motifs-in-biological-networks.pdf},
  sourcecode = {http://fpt.akt.tu-berlin.de/graph-motif/},
  doi = {10.1109/TCBB.2011.19},
  issn = {1545-5963},
  year = 2011,
  date = {2011-09-01},
  journal = {IEEE/ACM Transactions on Computational Biology and
                  Bioinformatics},
  volume = 8,
  number = 5,
  pages = {1296-1308},
  abstract = {We study the NP-hard LIST-COLORED GRAPH MOTIF problem
                  which, given an undirected list-colored graph G = (V,
                  E) and a multiset M of colors, asks for
                  maximum-cardinality sets S ⊆ V and M' ⊆ M such that
                  G[S] is connected and contains exactly (with respect
                  to multiplicity) the colors in M'. LIST-COLORED GRAPH
                  MOTIF has applications in the analysis of biological
                  networks. We study LIST-COLORED GRAPH MOTIF with
                  respect to three different parameterizations. For the
                  parameters motif size |M| and solution size |S|, we
                  present fixed-parameter algorithms, whereas for the
                  parameter |V| - |M|, we show W[1]-hardness for general
                  instances and achieve fixed-parameter tractability for
                  a special case of LIST-COLORED GRAPH MOTIF. We
                  implemented the fixed-parameter algorithms for
                  parameters |M| and |S|, developed further speed-up
                  heuristics for these algorithms, and applied them in
                  the context of querying protein-interaction networks,
                  demonstrating their usefulness for realistic
                  instances. Furthermore, we show that extending the
                  request for motif connectedness to stronger demands,
                  such as biconnectedness or bridge-connectedness, leads
                  to W[1]-hard problems when the parameter is the motif
                  size |M|.},
  keywords = {network analysis}
}
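
A solution verifier for the problem in the abstract above helps pin
the definitions down. The Python sketch below (ours, for the special
case in which every vertex carries one fixed colour rather than a
list) checks that G[S] is connected and that the colours of S occur
in the motif M with sufficient multiplicity.

  from collections import Counter

  def is_motif_solution(adj, colour, S, M):
      S = set(S)
      if not S:
          return True
      start = next(iter(S))               # connectivity check on G[S]
      seen, stack = {start}, [start]
      while stack:
          v = stack.pop()
          for w in adj[v] & S:
              if w not in seen:
                  seen.add(w)
                  stack.append(w)
      if seen != S:
          return False
      # The colours of S, with multiplicity, must fit into M.
      need, have = Counter(colour[v] for v in S), Counter(M)
      return all(have[c] >= need[c] for c in need)

  # Example: vertices 0 and 1 are adjacent and their colours fit M.
  adj = {0: {1}, 1: {0, 2}, 2: {1}}
  print(is_motif_solution(adj, {0: 'a', 1: 'b', 2: 'b'},
                          {0, 1}, ['a', 'b', 'b']))  # -> True
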
@conference{SBNW11,
  title = {From Few Components to an Eulerian Graph by Adding
                  Arcs},
  author = {Manuel Sorge and René van Bevern and Rolf Niedermeier
                  and Mathias Weller},
  url = {http://fpt.akt.tu-berlin.de/publications/From_Few_Components_to_an_Eulerian_Graph_by_Adding_Arcs-WG.pdf},
  doi = {10.1007/978-3-642-25870-1_28},
  isbn = {978-3-642-25869-5},
  year = {2011},
  date = {2011-06-21},
  booktitle = {Proceedings of the 37th International Workshop on
                  Graph-Theoretic Concepts in Computer Science (WG'11),
                  Teplá Monastery, Czech Republic},
  volume = {6986},
  pages = {307--318},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {Eulerian Extension (EE) is the problem to make an
                  arc-weighted directed multigraph Eulerian by adding
                  arcs of minimum total cost. EE is NP-hard and has been
                  shown fixed-parameter tractable with respect to the
                  number of arc additions. Complementing this result, on
                  the way to answering an open question, we show that EE
                  is fixed-parameter tractable with respect to the
                  combined parameter “number of connected components in
                  the underlying undirected multigraph” and “sum of
                  indeg(v) - outdeg(v) over all vertices v in the input
                  multigraph where this value is positive.” Moreover, we
                  show that EE is unlikely to admit a polynomial-size
                  problem kernel for this parameter combination and for
                  the parameter “number of arc additions”.},
  keywords = {graph modification, routing}
}
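
The combined parameter in the abstract above is simple to compute.
The Python sketch below (ours) returns both of its parts for a
directed multigraph given as an arc list: the number of connected
components of the underlying undirected multigraph, and the sum of
indeg(v) - outdeg(v) over the vertices where that value is positive.

  def eulerian_extension_parameters(n, arcs):
      indeg, outdeg = [0] * n, [0] * n
      parent = list(range(n))             # union-find for components
      def find(x):
          while parent[x] != x:
              parent[x] = parent[parent[x]]
              x = parent[x]
          return x
      for u, v in arcs:
          outdeg[u] += 1
          indeg[v] += 1
          parent[find(u)] = find(v)
      components = len({find(v) for v in range(n)})
      imbalance = sum(max(0, indeg[v] - outdeg[v]) for v in range(n))
      return components, imbalance

  # Example: arc 0->1 plus the 2-cycle 2<->3 give two components;
  # only vertex 1 has positive imbalance.
  print(eulerian_extension_parameters(4, [(0, 1), (2, 3), (3, 2)]))  # -> (2, 1)
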
@conference{BKMN10,
  title = {Measuring Indifference: Unit Interval Vertex Deletion},
  author = {René van Bevern and Christian Komusiewicz and Hannes
                  Moser and Rolf Niedermeier},
  url = {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/unit-interval-vertex-deletion-wg10.pdf},
  doi = {10.1007/978-3-642-16926-7_22},
  slides = {http://rvb.su/pdf/unit-interval-vertex-deletion-talk-wg2010.pdf},
  isbn = {978-3-642-16925-0},
  year = {2010},
  date = {2010-06-28},
  booktitle = {Proceedings of the 36th International Workshop on
                  Graph Theoretic Concepts in Computer Science (WG'10),
                  Zarós, Crete, Greece},
  volume = {6410},
  pages = {232--243},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {Making a graph unit interval by a minimum number of
                  vertex deletions is NP-hard. The problem is motivated
                  by applications in seriation and measuring
                  indifference between data items. We present a
                  fixed-parameter algorithm based on the iterative
                  compression technique that finds in
                  O((14k+14)^(k+1)kn^6) time a set of k vertices whose
                  deletion from an n-vertex graph makes it unit
                  interval. Additionally, we show that making a graph
                  chordal by at most k vertex deletions is NP-complete
                  even on {claw, net, tent}-free graphs.},
  keywords = {algorithmic graph theory, graph modification}
}
@conference{BMN10,
  title = {Kernelization through Tidying---A Case Study Based on
                  $s$-Plex Cluster Vertex Deletion},
  author = {René van Bevern and Hannes Moser and Rolf Niedermeier},
  doi = {10.1007/978-3-642-12200-2_46},
  isbn = {978-3-642-12199-9},
  slides = {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},
  year = {2010},
  date = {2010-04-19},
  booktitle = {Proceedings of the 9th Latin American Symposium on
                  Theoretical Informatics (LATIN'10), Oaxaca, Mexico},
  volume = {6034},
  pages = {527-538},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  abstract = {We introduce the NP-hard graph-based data clustering
                  problem s-Plex Cluster Vertex Deletion, where the task
                  is to delete at most k vertices from a graph so that
                  the connected components of the resulting graph are
                  s-plexes. In an s-plex, every vertex has an edge to
                  all but at most s−1 other vertices; cliques are
                  1-plexes. We propose a new method for kernelizing a
                  large class of vertex deletion problems and illustrate
                  it by developing an O(k^2 s^3)-vertex problem kernel
                  for s-Plex Cluster Vertex Deletion that can be
                  computed in O(ksn^2) time, where n is the number of
                  graph vertices. The corresponding “kernelization
                  through tidying” exploits polynomial-time
                  approximation results.},
  note = {The final version of this article appeared in
                  Algorithmica},
  longversion = {BMN12.html},
  keywords = {graph modification}
}
@mastersthesis{Bev10,
  title = {The Computational Hardness and Tractability of
                  Restricted Seriation Problems on Inaccurate Data},
  author = {René van Bevern},
  url = {http://rvb.su/pdf/Bev10.pdf},
  year = 2010,
  date = {2010-04-01},
  address = {Jena, Germany},
  school = {Institut für Informatik,
                  Friedrich-Schiller-Universität},
  note = {Diploma thesis}
}
@misc{Bev09,
  title = {Graph-based data clustering: a quadratic-vertex
                  problem kernel for $s$-Plex Cluster Vertex Deletion},
  author = {René van Bevern},
  url = {http://arxiv.org/abs/0909.2814},
  year = 2009,
  date = {2009-09-01},
  address = {Jena, Germany},
  school = {Institut für Informatik,
                  Friedrich-Schiller-Universität},
  note = {Undergraduate thesis},
  slides = {http://rvb.su/pdf/2plex-cvd.pdf},
  keywords = {graph modification}
}

This file was generated by bibtex2html 1.98.