Some work on the incremental chapter

Jip J. Dekker 2021-02-22 15:37:13 +11:00
parent 9475102e5a
commit ab6adde55c
11 changed files with 270 additions and 148 deletions

View File

@@ -5,7 +5,7 @@ PY_LISTINGS := $(addsuffix tex, $(wildcard assets/py/*.py) )
.PHONY: $(PROJECT).pdf clean clobber
$(PROJECT).pdf: $(PROJECT).tex listings
	latexmk -use-make $<
listings: $(MZN_LISTINGS) $(PY_LISTINGS)

View File

@@ -1 +1,2 @@
\newacronym[see={[Glossary:]{gls-cp}}]{cp}{CP}{Constraint Programming\glsadd{gls-cp}}
\newacronym[see={[Glossary:]{gls-lns}}]{lns}{LNS}{Large Neighbourhood Search\glsadd{gls-lns}}

View File

@@ -32,6 +32,25 @@
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@inproceedings{ek-2020-online,
author = {Alexander Ek and Maria Garcia de la Banda and Andreas
Schutt and Peter J. Stuckey and Guido Tack},
title = {Modelling and Solving Online Optimisation Problems},
booktitle = {The Thirty-Fourth {AAAI} Conference on Artificial
Intelligence, {AAAI} 2020, The Thirty-Second Innovative
Applications of Artificial Intelligence Conference, {IAAI}
2020, The Tenth {AAAI} Symposium on Educational Advances in
Artificial Intelligence, {EAAI} 2020, New York, NY, USA,
February 7-12, 2020},
pages = {1477--1485},
publisher = {{AAAI} Press},
year = 2020,
url = {https://aaai.org/ojs/index.php/AAAI/article/view/5506},
timestamp = {Tue, 02 Feb 2021 08:00:20 +0100},
biburl = {https://dblp.org/rec/conf/aaai/EkBSST20.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@article{freuder-1997-holygrail,
  author    = {Eugene C. Freuder},
  title     = {In Pursuit of the Holy Grail},
@@ -73,6 +92,25 @@
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@inproceedings{ingmar-2020-diverse,
author = {Linnea Ingmar and Maria Garcia de la Banda and Peter J.
Stuckey and Guido Tack},
title = {Modelling Diversity of Solutions},
booktitle = {The Thirty-Fourth {AAAI} Conference on Artificial
Intelligence, {AAAI} 2020, The Thirty-Second Innovative
Applications of Artificial Intelligence Conference, {IAAI}
2020, The Tenth {AAAI} Symposium on Educational Advances in
Artificial Intelligence, {EAAI} 2020, New York, NY, USA,
February 7-12, 2020},
pages = {1528--1535},
publisher = {{AAAI} Press},
year = 2020,
url = {https://aaai.org/ojs/index.php/AAAI/article/view/5512},
timestamp = {Tue, 02 Feb 2021 08:00:14 +0100},
biburl = {https://dblp.org/rec/conf/aaai/IngmarBST20.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@book{jaillet-2021-online,
  title  = {Online Optimization},
  author = {Jaillet, P. and Wagner, M.R.},

View File

@@ -11,41 +11,107 @@
\newglossaryentry{constraint}{
name={constraint},
description={A constraint is a relationship between two or more decision
variables or problem parameters that must hold in any valid solution to a
problem},
}
\newglossaryentry{constraint-modelling}{
name={constraint modelling},
description={Constraint modelling is a technique used to describe
combinatorial problems. In this paradigm the problem is described in terms of
\glspl{decision-variable} that have an unknown value, but are potentially
subject to certain \glspl{constraint}},
}
\newglossaryentry{gls-cp}{
name={constraint programming},
description={Constraint Programming (CP) is a paradigm used to solve
combinatorial problems. Its distinctive features are the declarative way in
which the user creates a problem description, in this thesis referred to as
\gls{constraint-modelling}, and its backtracking search that employs
\gls{propagation} and customisable search heuristics},
}
\newglossaryentry{decision-variable}{
name={decision variable},
description={A decision variable is a value that is yet to be determined. A
problem defined as a constraint model is solved by assigning a value to each
variable that does not violate any constraints in the model and, in the case of
an optimisation problem, optimises the objective function},
}
\newglossaryentry{flatzinc}{
name={Flat\-Zinc},
description={A subset of the \minizinc\ syntax that is used as input for
\glspl{solver}},
}
\newglossaryentry{global}{
name={global constraint},
description={A global constraint is a common \gls{constraint} pattern that can
be described using simpler \glspl{constraint}. \Glspl{solver} sometimes provide
dedicated algorithms or rewriting rules to better handle the global constraint},
}
\newglossaryentry{gls-lns}{
name={large neighbourhood search},
description={Large Neighbourhood Search (LNS) is a meta-search algorithm that
repeatedly restricts the search space by applying a \gls{neighbourhood}, in
order to quickly find better solutions to a problem},
}
\newglossaryentry{meta-search}{
name={meta-search},
description={A search approach that repeatedly solves constraint models},
}
\newglossaryentry{microzinc}{
name={Micro\-Zinc},
description={TODO},
}
\newglossaryentry{minisearch}{
name={Mini\-Search},
description={TODO},
}
\newglossaryentry{minizinc}{
name={Mini\-Zinc},
description={A high-level \gls{constraint-modelling} language with an
extensive library of \glspl{global}},
}
\newglossaryentry{nanozinc}{
name={Nano\-Zinc},
description={TODO},
}
\newglossaryentry{neighbourhood}{
name={neighbourhood},
description={A neighbourhood is a restriction of the search space of the
\gls{solver}},
}
\newglossaryentry{solver}{
name={solver},
description={A solver is a dedicated program or algorithm that can be used to
solve combinatorial problems, or a subset thereof},
}
\newglossaryentry{problem-parameter}{
name={problem parameter},
description={A problem parameter is a constant value that helps define the
problem. Its value can differ among different problem instances. Its exact value
must be known when rewriting a constraint model, but is not required when
compiling a constraint model into an executable program},
}
\newglossaryentry{propagation}{
name={constraint propagation},
description={Constraint propagation is the inference that
\glspl{decision-variable} can no longer take certain values, as these would
violate a \gls{constraint}},
}

View File

@@ -1,8 +1,8 @@
function ann: lns(var int: obj, array[int] of var int: vars,
                  int: iterations, float: destr_rate, int: explore_time) =
  repeat (i in 1..iterations) ( scope(
      if has_sol() then post(uniform_neighbourhood(vars, destr_rate))
      else true endif /\
      time_limit(explore_time, minimize_bab(obj)) /\
      commit() /\ print()
    ) /\ post(obj < sol(obj)) );

View File

@@ -1,10 +1,10 @@
\begin{Verbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8},xleftmargin=5mm]
\PY{k}{function}\PY{l+s}{ }\PY{k+kt}{ann}\PY{p}{:}\PY{l+s}{ }\PY{n+nf}{lns}\PY{p}{(}\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{obj}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{array}\PY{p}{[}\PY{k+kt}{int}\PY{p}{]}\PY{l+s}{ }\PY{k+kt}{of}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{vars}\PY{p}{,}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{iterations}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{destr\PYZus{}rate}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{explore\PYZus{}time}\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{=}
\PY{l+s}{ }\PY{l+s}{ }\PY{n+nv}{repeat}\PY{l+s}{ }\PY{p}{(}\PY{n+nv}{i}\PY{l+s}{ }\PY{o}{in}\PY{l+s}{ }\PY{l+m}{1}\PY{o}{..}\PY{n+nv}{iterations}\PY{p}{)}\PY{l+s}{ }\PY{p}{(}\PY{l+s}{ }\PY{n+nf}{scope}\PY{p}{(}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{if}\PY{l+s}{ }\PY{n+nf}{has\PYZus{}sol}\PY{p}{(}\PY{p}{)}\PY{l+s}{ }\PY{k}{then}\PY{l+s}{ }\PY{n+nf}{post}\PY{p}{(}\PY{n+nf}{uniform\PYZus{}neighbourhood}\PY{p}{(}\PY{n+nv}{vars}\PY{p}{,}\PY{l+s}{ }\PY{n+nv}{destr\PYZus{}rate}\PY{p}{)}\PY{p}{)}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{else}\PY{l+s}{ }\PY{l}{true}\PY{l+s}{ }\PY{k}{endif}\PY{l+s}{ }\PY{o}{/\PYZbs{}}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{n+nf}{time\PYZus{}limit}\PY{p}{(}\PY{n+nv}{explore\PYZus{}time}\PY{p}{,}\PY{l+s}{ }\PY{n+nf}{minimize\PYZus{}bab}\PY{p}{(}\PY{n+nv}{obj}\PY{p}{)}\PY{p}{)}\PY{l+s}{ }\PY{o}{/\PYZbs{}}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{n+nf}{commit}\PY{p}{(}\PY{p}{)}\PY{l+s}{ }\PY{o}{/\PYZbs{}}\PY{l+s}{ }\PY{n+nf}{print}\PY{p}{(}\PY{p}{)}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{/\PYZbs{}}\PY{l+s}{ }\PY{n+nf}{post}\PY{p}{(}\PY{n+nv}{obj}\PY{l+s}{ }\PY{o}{\PYZlt{}}\PY{l+s}{ }\PY{n+nf}{sol}\PY{p}{(}\PY{n+nv}{obj}\PY{p}{)}\PY{p}{)}\PY{l+s}{ }\PY{g+gr}{)}\PY{p}{;}
\end{Verbatim}

View File

@@ -1,3 +1,3 @@
predicate uniform_neighbourhood(array[int] of var int: x, float: destr_rate) =
  forall(i in index_set(x))
  (if uniform(0.0,1.0) > destr_rate then x[i] = sol(x[i]) else true endif);

View File

@@ -1,5 +1,5 @@
\begin{Verbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8},xleftmargin=5mm]
\PY{k}{predicate}\PY{l+s}{ }\PY{n+nf}{uniform\PYZus{}neighbourhood}\PY{p}{(}\PY{k+kt}{array}\PY{p}{[}\PY{k+kt}{int}\PY{p}{]}\PY{l+s}{ }\PY{k+kt}{of}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{x}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{destr\PYZus{}rate}\PY{p}{)}\PY{l+s}{ }\PY{o}{=}
\PY{l+s}{ }\PY{l+s}{ }\PY{k}{forall}\PY{p}{(}\PY{n+nv}{i}\PY{l+s}{ }\PY{o}{in}\PY{l+s}{ }\PY{n+nb}{index\PYZus{}set}\PY{p}{(}\PY{n+nv}{x}\PY{p}{)}\PY{p}{)}
\PY{l+s}{ }\PY{l+s}{ }\PY{p}{(}\PY{k}{if}\PY{l+s}{ }\PY{n+nf}{uniform}\PY{p}{(}\PY{l+m}{0.0}\PY{p}{,}\PY{l+m}{1.0}\PY{p}{)}\PY{l+s}{ }\PY{o}{\PYZgt{}}\PY{l+s}{ }\PY{n+nv}{destr\PYZus{}rate}\PY{l+s}{ }\PY{k}{then}\PY{l+s}{ }\PY{n+nv}{x}\PY{p}{[}\PY{n+nv}{i}\PY{p}{]}\PY{l+s}{ }\PY{o}{=}\PY{l+s}{ }\PY{n+nf}{sol}\PY{p}{(}\PY{n+nv}{x}\PY{p}{[}\PY{n+nv}{i}\PY{p}{]}\PY{p}{)}\PY{l+s}{ }\PY{k}{else}\PY{l+s}{ }\PY{l}{true}\PY{l+s}{ }\PY{k}{endif}\PY{p}{)}\PY{p}{;}
\end{Verbatim}

View File

@@ -1,6 +1,9 @@
\newcommand{\eg}{e.g.,}
\newcommand{\ie}{i.e.,}
\newcommand{\flatzinc}{\gls{flatzinc}}
\newcommand{\microzinc}{\gls{microzinc}}
\newcommand{\minisearch}{\gls{minisearch}}
\newcommand{\minizinc}{\gls{minizinc}}
\newcommand{\nanozinc}{\gls{nanozinc}}
\newcommand{\cml}{\gls{constraint-modelling} language}
\newcommand{\cmls}{\gls{constraint-modelling} languages}

View File

@@ -5,7 +5,7 @@
A goal shared between all programming languages is to provide a certain level of
abstraction: an assembly language allows you to abstract from the binary
instructions and memory positions; low-level imperative languages, like FORTRAN,
were the first to allow you to abstract from the processor architecture of the
target machine; and nowadays writing a program requires little knowledge of the
actual workings of the hardware. Freuder states that the ``Holy Grail'' of
programming languages would be where the user merely states the problem, and the

View File

@@ -1,167 +1,173 @@
\chapter{Incremental Processing}\label{ch:incremental}
%************************************************

In previous chapters we explored the compilation of constraint models for a
\gls{solver} as a single, linear process, but to solve real-world problems
\gls{meta-search} algorithms are often used. These methods usually require
solving almost the same combinatorial problem repeatedly, with only slight
modifications, thousands of times. Examples of these methods are:
\begin{itemize}
  \item Multi-objective search \autocite{jones-2002-multi-objective}. Optimising
        multiple objectives is often not supported directly in solvers. Instead,
        such problems can be solved using a \gls{meta-search} approach: find a
        solution to a (single-objective) problem, then add more constraints to
        the original problem and repeat.
  \item \gls{lns} \autocite{shaw-1998-local-search}. This is a very successful
        \gls{meta-search} algorithm for quickly improving solution quality. After
        finding a (sub-optimal) solution to a problem, constraints are added to
        restrict the search in the \gls{neighbourhood} of that solution. When a
        new solution is found, the constraints are removed, and constraints for
        a new \gls{neighbourhood} are added.
  \item Online Optimisation \autocite{jaillet-2021-online}. These techniques can
        be employed when the problem rapidly changes. A problem instance is
        continuously updated with new data, such as newly available jobs to be
        scheduled or customer requests to be processed.
  \item Diverse Solution Search \autocite{hebrard-2005-diverse}. Here we aim to
        provide a set of solutions that are sufficiently different from each
        other in order to give human decision makers an overview of the solution
        space. Diversity can be achieved by repeatedly solving a problem
        instance with different objectives.
  % \item In Interactive Search \autocite{}, a user provides feedback on decisions
  %       made by the solver. The feedback is added back into the problem, and a
  %       new solution is generated. Users may also take back some earlier
  %       feedback and explore different aspects of the problem.
\end{itemize}
All of these examples have in common that a problem instance is solved, new
constraints are added, the resulting instance is solved again, and constraints
may be removed again.
The use of these methods is not new to \gls{constraint-modelling} and they
have proven to be very useful \autocite{schrijvers-2013-combinators,
rendl-2015-minisearch, schiendorfer-2018-minibrass, ek-2020-online,
ingmar-2020-diverse}. In its most basic form, a simple scripting language is
sufficient to implement these methods, by repeatedly calling on the
\gls{constraint-modelling} infrastructure to compile and solve the adjusted
constraint models. While improvements to the compilation of constraint models,
such as the ones discussed in previous chapters, can increase the performance of
these approaches, the overhead of re-compiling an almost identical model may
still prove prohibitive, warranting direct support from the
\gls{constraint-modelling} infrastructure. In this chapter we introduce two
methods to provide this support:
\begin{itemize}
\item We can add an interface for adding and removing constraints in the
\gls{constraint-modelling} infrastructure and avoid recompilation where
possible.
\item With a slight extension of existing solvers, we can compile
\gls{meta-search} algorithms into efficient solver-level specifications,
        avoiding recompilation altogether.
\end{itemize}
The rest of the chapter is organised as follows. \Cref{sec:6-minisearch}
discusses \minisearch\ as a basis for extending \cmls\ with \gls{meta-search}
capabilities. \Cref{sec:6-modelling} discusses how to extend a \cml\ to model
the changes to be made by a \gls{meta-search} algorithm.
\Cref{sec:6-incremental-compilation} introduces a method that extends the
\gls{constraint-modelling} infrastructure with an interface to add and remove
constraints from an existing model while avoiding recompilation.
\Cref{sec:6-solver-extension} introduces a method that compiles some
\gls{meta-search} algorithms into efficient solver-level specifications that
only require a small extension of existing \glspl{solver}.
\Cref{sec:6-experiments} reports on the experimental results of both approaches.
Finally, \Cref{sec:6-conclusion} presents the conclusions.

\section{Meta-Search in \glsentrytext{minisearch}}
\label{sec:6-minisearch}

% Most LNS literature discusses neighbourhoods in terms of ``destroying'' part of
% a solution that is later repaired. However, from a declarative modelling point
% of view, it is more natural to see neighbourhoods as adding new constraints and
% variables that need to be applied to the base model, \eg\ forcing variables to
% take the same value as in the previous solution.

\minisearch\ \autocite{rendl-2015-minisearch} introduced a \minizinc\ extension
that enables modellers to express meta-searches inside a \minizinc\ model. A
meta-search in \minisearch\ typically solves a given \minizinc\ model, performs
some calculations on the solution, adds new constraints and then solves again.

Most \gls{meta-search} definitions in \minisearch\ consist of two parts. The
first part is a declarative definition of any restriction to the search space
that the \gls{meta-search} algorithm might apply, called a \gls{neighbourhood}.
In \minisearch\ these definitions can make use of the function
\mzninline{function int: sol(var int: x)}, which returns the value that variable
\mzninline{x} was assigned to in the previous solution (similar functions are
defined for Boolean, float and set variables). This allows the
\gls{neighbourhood} to be defined in terms of the previous solution. In
addition, a neighbourhood predicate will typically make use of the random number
generators available in the \minizinc\ standard library.
\Cref{lst:6-lns-minisearch-pred} shows a simple random neighbourhood. For each
decision variable \mzninline{x[i]}, it draws a random number from a uniform
distribution and, if it exceeds threshold \mzninline{destr_rate}, posts
constraints forcing \mzninline{x[i]} to take the same value as in the previous
solution. For example, \mzninline{uniform_neighbourhood(x, 0.2)} would result in
each variable in the array \mzninline{x} having a 20\% chance of being
unconstrained, and an 80\% chance of being assigned to the value it had in the
previous solution.
\begin{listing}
  \highlightfile{assets/mzn/6_lns_minisearch_pred.mzn}
  \caption{\label{lst:6-lns-minisearch-pred} A simple random \gls{lns} predicate
  implemented in \minisearch{}}
\end{listing}

\begin{listing}
  \highlightfile{assets/mzn/6_lns_minisearch.mzn}
  \caption{\label{lst:6-lns-minisearch} A simple \gls{lns} \gls{meta-search}
  implemented in \minisearch{}}
\end{listing}
The second part of a \minisearch\ definition is the \gls{meta-search} algorithm
itself. \Cref{lst:6-lns-minisearch} shows a \minisearch\ implementation of a
basic \gls{lns} algorithm, called \mzninline{lns}. It
performs a fixed number of iterations, each invoking the neighbourhood predicate
\mzninline{uniform_neighbourhood} in a fresh scope (so that the constraints only
affect the current loop iteration). It then searches for a solution
(\mzninline{minimize_bab}) with a given timeout, and if the search does return a
new solution, it commits to that solution (so that it becomes available to the
\mzninline{sol} function in subsequent iterations). The \mzninline{lns} function
also posts the constraint \mzninline{obj < sol(obj)}, ensuring the objective
value in the next iteration is strictly better than that of the current
solution.
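
To illustrate how a model would use this function, the following sketch adds it
to a toy model's \mzninline{solve} item. This is only an illustration: it
assumes \minisearch's \mzninline{solve search} syntax, that the \mzninline{lns}
function and \mzninline{uniform_neighbourhood} predicate from the listings above
are included, and that the exploration time limit is given in milliseconds; the
variable declarations are purely hypothetical.

\begin{verbatim}
array[1..10] of var 1..10: x;
var int: obj = sum(x);

% 100 LNS iterations, leaving roughly 20% of the variables unconstrained each
% time, with a 5 second exploration limit per iteration (assumed milliseconds)
solve search lns(obj, x, 100, 0.2, 5000);
\end{verbatim}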
Although \minisearch\ enables the modeller to express \glspl{neighbourhood} in a
declarative way, the definition of the \gls{meta-search} algorithm is rather
unintuitive and difficult to debug, leading to unwieldy code for defining even
simple restarting strategies.

\textbf{TODO:} Furthermore, the \minisearch\ implementation requires either a
close integration of the backend solver into the \minisearch\ system, or it
drives the solver through the regular text-file based \flatzinc\ interface,
leading to a significant communication overhead.
% To address these two issues, we propose to keep modelling neighbourhoods as
% predicates, but define a small number of additional \minizinc\ built-in
% annotations and functions that (a) allow us to express important aspects of the
% meta-search in a more convenient way, and (b) enable a simple compilation scheme
% that requires no additional communication with and only small, simple extensions
% of the backend solver.
% The approach we follow here is therefore to \textbf{extend \flatzinc}, such that
% the definition of neighbourhoods can be communicated to the solver together with
% the problem instance. This maintains the loose coupling of \minizinc\ and
% solver, while avoiding the costly communication and cold-starting of the
% black-box approach.
\section{Modelling of Meta-Search}
\label{sec:6-modelling}
Instead of the complex \minisearch\ definitions, we propose to add support for
simple meta-searches that are purely based on the notion of \emph{restarts}. A
restart happens when a solver abandons its current search efforts, returns to
the root node of the search tree, and begins a new exploration. Many \gls{cp}
solvers already provide support for controlling their restarting behaviour,
\eg\ they can periodically restart after a certain number of nodes, or restart
for every solution. Typically, solvers also support posting additional
constraints upon restarting (\eg\ Comet \autocite{michel-2005-comet}) that are
only valid for the particular restart (\ie\ they are ``retracted'' for the next
restart).
In its simplest form, we can therefore implement LNS by specifying a
neighbourhood predicate, and annotating the \mzninline{solve} item to indicate
@@ -178,7 +184,7 @@ restart as a string (see the definition of the new \mzninline{on_restart}
annotation in \cref{lst:6-restart-ann}).
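
As an illustration, the core of a model using this annotation could look as
follows. This is only a sketch: the neighbourhood predicate name is passed as a
string as described above, the declarations are hypothetical, and
\mzninline{restart_constant} (a standard \minizinc\ restart annotation) is
merely one possible choice for the restarting strategy discussed next.

\begin{verbatim}
array[1..10] of var 1..10: x;
var int: obj = sum(x);

% nullary wrapper around the neighbourhood predicate defined earlier
predicate my_neighbourhood() = uniform_neighbourhood(x, 0.2);

solve :: on_restart("my_neighbourhood")
      :: restart_constant(500)   % restart every 500 nodes
      minimize obj;
\end{verbatim}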
The second component of our LNS definition is the \emph{restarting strategy},
defining how much effort the solver should put into each neighbourhood (\ie\
restart), and when to stop the overall search.

We propose adding new search annotations to \minizinc\ to control this behaviour
@@ -210,7 +216,7 @@ round-robin fashion.
In \minisearch{}, adaptive or round-robin approaches can be implemented using
\emph{state variables}, which support destructive update (overwriting the value
they store). In this way, the \minisearch\ strategy can store values to be used
in later iterations. We use the \emph{solver state} instead, \ie\ normal
decision variables, and define two simple built-in functions to access the
solver state \emph{of the previous restart}. This approach is sufficient for
expressing neighbourhood selection strategies, and its implementation is much
@@ -236,9 +242,9 @@ undefined if \mzninline{status()=START}).
In order to be able to initialise the variables used for state access, we
reinterpret \mzninline{on_restart} so that the predicate is also called for the
initial search (\ie\ before the first ``real'' restart) with the same semantics,
that is, any constraint posted by the predicate will be retracted for the next
restart.
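
To illustrate how such solver state can be used, the following sketch alternates
between two neighbourhood predicates in a round-robin fashion. It is purely
illustrative: the state-access function \mzninline{last_val} is an assumption
(only \mzninline{status} appears above), and the selection predicates defined in
this thesis may differ.

\begin{verbatim}
predicate round_robin_two(var bool: use_nbh1, var bool: use_nbh2) =
  let {
    % solver state: which neighbourhood is chosen in this restart
    var 0..2: select;
  } in (
    if status() = START
    then select = 0                            % no neighbourhood initially
    else select = last_val(select) mod 2 + 1   % alternate between 1 and 2
    endif
    /\ (use_nbh1 <-> select = 1)
    /\ (use_nbh2 <-> select = 2)
  );
\end{verbatim}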
\paragraph{Parametric neighbourhood selection predicates}
@@ -330,7 +336,9 @@ express:
\highlightfile{assets/mzn/6_simulated_annealing.mzn}
\section{An Incremental Interface for Constraint Modelling Languages}
\label{sec:6-incremental-compilation}

In order to support incremental flattening, the \nanozinc\ interpreter must be
able to process \nanozinc\ calls \emph{added} to an existing \nanozinc\ program,
@@ -395,7 +403,7 @@ trailing.
the changes recorded in the trail, in reverse order.
\end{example}
\subsection{Incremental Solving}

Ideally, the incremental changes made by the interpreter would also be applied
incrementally to the solver. This requires the solver to support both the
@@ -419,8 +427,8 @@ therefore support solvers with different levels of an incremental interface:
point.
\end{itemize}
\section{Solver Executable Meta-Search}
\label{sec:6-solver-extension}

The neighbourhoods defined in the previous section can be executed with
\minisearch\ by adding support for the \mzninline{status} and
@@ -428,14 +436,14 @@ The neighbourhoods defined in the previous section can be executed with
The \minisearch{} evaluator will then call a solver to produce a solution, and
evaluate the neighbourhood predicate, incrementally producing new \flatzinc\ to
be added to the next round of solving.
While this is a viable approach, our goal is to keep the compiler and solver
separate, by embedding the entire LNS specification into the \flatzinc\ that is
passed to the solver.
This section introduces such a compilation approach. It only requires simple
modifications of the \minizinc\ compiler, and the compiled \flatzinc\ can be
executed by standard \gls{cp} solvers with a small set of simple extensions.
\subsection{Compilation overview}
@@ -643,7 +651,9 @@ against being invoked before \mzninline{status()!=START}, since the
solution has been recorded yet, but we use this simple example to illustrate
how these Boolean conditions are compiled and evaluated.
\section{Experiments}
\label{sec:6-experiments}

We have created a prototype implementation of the architecture presented in the
preceding sections. It consists of a compiler from \minizinc\ to \microzinc, and
@@ -771,7 +781,7 @@ specifications can (a) be effective and (b) incur only a small overhead compared
to a dedicated implementation of the neighbourhoods.

To measure the overhead, we implemented our new approach in
Gecode~\autocite{gecode-2021-gecode}. The resulting solver (\gecodeMzn in the tables
below) has been instrumented to also output the domains of all model variables
after propagating the new special constraints. We implemented another extension
to Gecode (\gecodeReplay) that simply reads the stream of variable domains for
@@ -793,7 +803,7 @@ MacOS High Sierra. LNS benchmarks are repeated with 10 different random seeds
and the average is shown. The overall timeout for each run is 120 seconds.

We ran experiments for three models from the MiniZinc
challenge~\autocite{stuckey-2010-challenge, stuckey-2014-challenge} (\texttt{gbac},
\texttt{steelmillslab}, and \texttt{rcpsp-wet}). The best objective found during
the \minizinc\ Challenge is shown for every instance (\emph{best known}).
@@ -823,7 +833,7 @@ The Generalised Balanced Academic Curriculum problem comprises courses having a
specified number of credits and lasting a certain number of periods, load limits
of courses for each period, prerequisites for courses, and preferences of
teaching periods for professors. A detailed description of the problem is given
in~\autocite{chiarandini-2012-gbac}. The main decisions are to assign courses to
periods, which is done via the variables \mzninline{period_of} in the model.
\cref{lst:6-free-period} shows the neighbourhood chosen, which randomly picks one
period and frees all courses that are assigned to it.
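
A sketch of what such a neighbourhood can look like is shown below. It is
illustrative only: the names \mzninline{courses} and \mzninline{n_periods} are
assumptions, an integer overload of \mzninline{uniform} is assumed, and the
actual listing referenced above may differ.

\begin{verbatim}
predicate free_period() =
  let {
    % pick one period at random (assumes an integer uniform(l, u) builtin)
    int: period = uniform(1, n_periods);
  } in
  forall (c in courses where sol(period_of[c]) != period) (
    % fix every course outside the chosen period to its previous value
    period_of[c] = sol(period_of[c])
  );
\end{verbatim}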
@@ -916,3 +926,7 @@ fewer nodes per second than \gecodeReplay. This overhead is caused by
propagating the additional constraints in \gecodeMzn. Overall, the experiments
demonstrate that the compilation approach is an effective and efficient way of
adding LNS to a modelling language with minimal changes to the solver.
\section{Conclusions}
\label{sec:6-conclusion}