Add initial incremental section from Abstract Machine paper

This commit is contained in:
Jip J. Dekker 2021-02-10 13:58:42 +11:00
parent c48148ffaf
commit 6a5fe8d077
No known key found for this signature in database
GPG Key ID: 517DF4A00618C9C3
22 changed files with 574 additions and 28 deletions

View File

@ -1,3 +1,37 @@
@article{baatar-2011-radiation,
  author    = {Davaatseren Baatar and Natashia Boland and Sebastian Brand and Peter J. Stuckey},
  title     = {{CP} and {IP} approaches to cancer radiotherapy delivery optimization},
  journal   = {Constraints An Int. J.},
  year      = {2011},
  volume    = {16},
  number    = {2},
  pages     = {173--194},
  doi       = {10.1007/s10601-010-9104-1},
  url       = {https://doi.org/10.1007/s10601-010-9104-1},
  timestamp = {Fri, 13 Mar 2020 10:58:27 +0100},
  biburl    = {https://dblp.org/rec/journals/constraints/BaatarBBS11.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@article{chiarandini-2012-gbac,
  author    = {Marco Chiarandini and Luca Di Gaspero and Stefano Gualandi and Andrea Schaerf},
  title     = {The balanced academic curriculum problem revisited},
  journal   = {J. Heuristics},
  year      = {2012},
  volume    = {18},
  number    = {1},
  pages     = {119--148},
  doi       = {10.1007/s10732-011-9158-2},
  url       = {https://doi.org/10.1007/s10732-011-9158-2},
  timestamp = {Fri, 30 Nov 2018 13:23:27 +0100},
  biburl    = {https://dblp.org/rec/journals/heuristics/ChiarandiniGGS12.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@article{freuder-1997-holygrail, @article{freuder-1997-holygrail,
author = {Eugene C. Freuder}, author = {Eugene C. Freuder},
title = {In Pursuit of the Holy Grail}, title = {In Pursuit of the Holy Grail},
@ -13,6 +47,51 @@
bibsource = {dblp computer science bibliography, https://dblp.org} bibsource = {dblp computer science bibliography, https://dblp.org}
} }
@inproceedings{hebrard-2005-diverse,
  author    = {Emmanuel Hebrard and Brahim Hnich and Barry O'Sullivan and Toby Walsh},
  editor    = {Manuela M. Veloso and Subbarao Kambhampati},
  title     = {Finding Diverse and Similar Solutions in Constraint Programming},
  booktitle = {Proceedings, The Twentieth National Conference on Artificial
               Intelligence and the Seventeenth Innovative Applications of
               Artificial Intelligence Conference, July 9--13, 2005,
               Pittsburgh, Pennsylvania, {USA}},
  pages     = {372--377},
  publisher = {{AAAI} Press / The {MIT} Press},
  year      = {2005},
  url       = {http://www.aaai.org/Library/AAAI/2005/aaai05-059.php},
  timestamp = {Mon, 10 Dec 2012 15:34:42 +0100},
  biburl    = {https://dblp.org/rec/conf/aaai/HebrardHOW05.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@book{jaillet-2021-online,
  author    = {Jaillet, P. and Wagner, M. R.},
  title     = {Online Optimization},
  series    = {International Series in Operations Research \& Management
               Science},
  publisher = {Springer US},
  year      = {2021},
  isbn      = {9780387717715}
}
@article{jones-2002-multi-objective,
  author    = {Dylan F. Jones and S. Keyvan Mirrazavi and Mehrdad Tamiz},
  title     = {Multi-objective meta-heuristics: An overview of the current state-of-the-art},
  journal   = {Eur. J. Oper. Res.},
  year      = {2002},
  volume    = {137},
  number    = {1},
  pages     = {1--9},
  doi       = {10.1016/S0377-2217(01)00123-0},
  url       = {https://doi.org/10.1016/S0377-2217(01)00123-0},
  timestamp = {Fri, 21 Feb 2020 13:15:05 +0100},
  biburl    = {https://dblp.org/rec/journals/eor/JonesMT02.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@book{marriott-1998-clp, @book{marriott-1998-clp,
location = {Cambridge, Mass}, location = {Cambridge, Mass},
title = {Programming with constraints: an introduction}, title = {Programming with constraints: an introduction},
@ -97,6 +176,60 @@
bibsource = {dblp computer science bibliography, https://dblp.org} bibsource = {dblp computer science bibliography, https://dblp.org}
} }
@article{schiendorfer-2018-minibrass,
  author    = {Alexander Schiendorfer and Alexander Knapp and Gerrit Anders
               and Wolfgang Reif},
  title     = {{MiniBrass}: Soft constraints for {MiniZinc}},
  journal   = {Constraints An Int. J.},
  volume    = {23},
  number    = {4},
  pages     = {403--450},
  year      = {2018},
  url       = {https://doi.org/10.1007/s10601-018-9289-2},
  doi       = {10.1007/s10601-018-9289-2},
  timestamp = {Mon, 26 Oct 2020 09:00:47 +0100},
  biburl    = {https://dblp.org/rec/journals/constraints/SchiendorferKAR18.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@article{schrijvers-2013-combinators,
  author    = {Tom Schrijvers and Guido Tack and Pieter Wuille and Horst Samulowitz and Peter J. Stuckey},
  title     = {Search combinators},
  journal   = {Constraints An Int. J.},
  year      = {2013},
  volume    = {18},
  number    = {2},
  pages     = {269--305},
  doi       = {10.1007/s10601-012-9137-8},
  url       = {https://doi.org/10.1007/s10601-012-9137-8},
  timestamp = {Fri, 13 Mar 2020 10:58:29 +0100},
  biburl    = {https://dblp.org/rec/journals/constraints/SchrijversTWSS13.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@inproceedings{shaw-1998-local-search,
  author    = {Paul Shaw},
  editor    = {Michael J. Maher and Jean{-}Fran{\c{c}}ois Puget},
  title     = {Using Constraint Programming and Local Search Methods to
               Solve Vehicle Routing Problems},
  booktitle = {Principles and Practice of Constraint Programming - {CP98},
               4th International Conference, Pisa, Italy, October 26--30,
               1998, Proceedings},
  series    = {Lecture Notes in Computer Science},
  volume    = {1520},
  pages     = {417--431},
  publisher = {Springer},
  year      = {1998},
  url       = {https://doi.org/10.1007/3-540-49481-2_30},
  doi       = {10.1007/3-540-49481-2_30},
  timestamp = {Tue, 14 May 2019 10:00:45 +0200},
  biburl    = {https://dblp.org/rec/conf/cp/Shaw98.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
@book{silvano-1990-knapsack, @book{silvano-1990-knapsack,
author = {Martello, Silvano and Toth, Paolo}, author = {Martello, Silvano and Toth, Paolo},
title = {Knapsack Problems: Algorithms and Computer Implementations}, title = {Knapsack Problems: Algorithms and Computer Implementations},
@ -125,3 +258,11 @@
biburl = {https://dblp.org/rec/conf/cpaior/StuckeyT13.bib}, biburl = {https://dblp.org/rec/conf/cpaior/StuckeyT13.bib},
bibsource = {dblp computer science bibliography, https://dblp.org} bibsource = {dblp computer science bibliography, https://dblp.org}
} }
@techreport{warren-1983-wam,
  author      = {Warren, David H. D.},
  title       = {An abstract {Prolog} instruction set},
  type        = {Technical Note},
  number      = {309},
  institution = {SRI International},
  address     = {Menlo Park, California},
  year        = {1983}
}

View File

@ -21,6 +21,10 @@
name={Flat\-Zinc}, name={Flat\-Zinc},
description={TODO}, description={TODO},
} }
\newglossaryentry{microzinc}{
name={Micro\-Zinc},
description={TODO},
}
\newglossaryentry{minisearch}{ \newglossaryentry{minisearch}{
name={Mini\-Search}, name={Mini\-Search},
description={TODO}, description={TODO},
@ -29,6 +33,10 @@
name={Mini\-Zinc}, name={Mini\-Zinc},
description={TODO}, description={TODO},
} }
\newglossaryentry{nanozinc}{
name={Nano\-Zinc},
description={TODO},
}
\newglossaryentry{solver}{ \newglossaryentry{solver}{
name={solver}, name={solver},
description={TODO}, description={TODO},

Binary file not shown.

Binary file not shown.

BIN
assets/img/6_gbac.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

BIN
assets/img/6_radiation.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

View File

@ -0,0 +1,4 @@
c @$\mapsto$@ true @$\sep$@ []
x @$\mapsto$@ mkvar(-10..10) @$\sep$@ []
y @$\mapsto$@ mkvar(-10..10) @$\sep$@ []
true @$\mapsto$@ true @$\sep$@ []

View File

@ -0,0 +1,6 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{n+nv}{c}\PY{l+s}{ }\PY{esc}{$\mapsto$}\PY{l+s}{ }\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{n+nv}{x}\PY{l+s}{ }\PY{esc}{$\mapsto$}\PY{l+s}{ }\PY{n+nf}{mkvar}\PY{p}{(}\PY{o}{\PYZhy{}}\PY{l+m}{10}\PY{o}{..}\PY{l+m}{10}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{n+nv}{y}\PY{l+s}{ }\PY{esc}{$\mapsto$}\PY{l+s}{ }\PY{n+nf}{mkvar}\PY{p}{(}\PY{o}{\PYZhy{}}\PY{l+m}{10}\PY{o}{..}\PY{l+m}{10}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\mapsto$}\PY{l+s}{ }\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\end{BVerbatim}

View File

@ -0,0 +1,14 @@
% Posted c
true @$\lhd$@ -[c]
% Propagated c = true
c @$\mapsfrom$@ mkvar(0,1) @$\sep$@ []
true @$\lhd$@ +[c]
% Simplified bool_or(b1, true) = true
b2 @$\mapsfrom$@ bool_or(b1, c) @$\sep$@ []
true @$\lhd$@ +[b2]
% b1 became unused...
b1 @$\mapsfrom$@ int_gt(t, y) @$\sep$@ []
% causing t, then b0 and finally z to become unused
t @$\mapsfrom$@ z @$\sep$@ [b0]
b0 @$\mapsfrom$@ int_abs(x, z) @$\sep$@ []
z @$\mapsfrom$@ mkvar(-infinity,infinity) @$\sep$@ []

View File

@ -0,0 +1,16 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{c}{\PYZpc{} Posted c}
\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\lhd$}\PY{l+s}{ }\PY{o}{\PYZhy{}}\PY{p}{[}\PY{n+nv}{c}\PY{g+gr}{]}
\PY{c}{\PYZpc{} Propagated c = true}
\PY{n+nv}{c}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nf}{mkvar}\PY{p}{(}\PY{l+m}{0,1}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\lhd$}\PY{l+s}{ }\PY{o}{+}\PY{p}{[}\PY{n+nv}{c}\PY{g+gr}{]}
\PY{c}{\PYZpc{} Simplified bool_or(b1, true) = true}
\PY{n+nv}{b2}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nf}{bool\PYZus{}or}\PY{p}{(}\PY{n+nv}{b1}\PY{p}{,}\PY{l+s}{ }\PY{n+nv}{c}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{l}{true}\PY{l+s}{ }\PY{esc}{$\lhd$}\PY{l+s}{ }\PY{o}{+}\PY{p}{[}\PY{n+nv}{b2}\PY{g+gr}{]}
\PY{c}{\PYZpc{} b1 became unused...}
\PY{n+nv}{b1}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nf}{int\PYZus{}gt}\PY{p}{(}\PY{n+nv}{t}\PY{p}{,}\PY{l+s}{ }\PY{n+nv}{y}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{c}{\PYZpc{} causing t, then b0 and finally z to become unused}
\PY{n+nv}{t}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nv}{z}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{n+nv}{b0}\PY{g+gr}{]}
\PY{n+nv}{b0}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nf}{int\PYZus{}abs}\PY{p}{(}\PY{n+nv}{x}\PY{p}{,}\PY{l+s}{ }\PY{n+nv}{z}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\PY{n+nv}{z}\PY{l+s}{ }\PY{esc}{$\mapsfrom$}\PY{l+s}{ }\PY{n+nf}{mkvar}\PY{p}{(}\PY{o}{\PYZhy{}}\PY{n+nv}{infinity}\PY{p}{,}\PY{n+nv}{infinity}\PY{g+gr}{)}\PY{l+s}{ }\PY{esc}{$\sep$}\PY{l+s}{ }\PY{p}{[}\PY{g+gr}{]}
\end{BVerbatim}

View File

@ -0,0 +1,4 @@
predicate random_allocation(array[int] of int: sol) =
forall(i in courses) (
(uniform(0,99) < 80) -> (period_of[i] == sol[i])
);

View File

@ -0,0 +1,6 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{k}{predicate}\PY{l+s}{ }\PY{n+nf}{random\PYZus{}allocation}\PY{p}{(}\PY{k+kt}{array}\PY{p}{[}\PY{k+kt}{int}\PY{g+gr}{]}\PY{l+s}{ }\PY{k+kt}{of}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{sol}\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{=}
\PY{k}{forall}\PY{p}{(}\PY{n+nv}{i}\PY{l+s}{ }\PY{o}{in}\PY{l+s}{ }\PY{n+nv}{courses}\PY{g+gr}{)}\PY{l+s}{ }\PY{p}{(}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{p}{(}\PY{n+nf}{uniform}\PY{p}{(}\PY{l+m}{0,99}\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{\PYZlt{}}\PY{l+s}{ }\PY{l+m}{80}\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{\PYZhy{}\PYZgt{}}\PY{l+s}{ }\PY{p}{(}\PY{n+nv}{period\PYZus{}of}\PY{p}{[}\PY{n+nv}{i}\PY{g+gr}{]}\PY{l+s}{ }\PY{o}{==}\PY{l+s}{ }\PY{n+nv}{sol}\PY{p}{[}\PY{n+nv}{i}\PY{g+gr}{]}\PY{g+gr}{)}
\PY{g+gr}{)}\PY{p}{;}
\end{BVerbatim}

View File

@ -0,0 +1,2 @@
constraint x < 10;
constraint y < x;

View File

@ -0,0 +1,4 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{k}{constraint}\PY{l+s}{ }\PY{n+nv}{x}\PY{l+s}{ }\PY{o}{\PYZlt{}}\PY{l+s}{ }\PY{l+m}{10}\PY{p}{;}
\PY{k}{constraint}\PY{l+s}{ }\PY{n+nv}{y}\PY{l+s}{ }\PY{o}{\PYZlt{}}\PY{l+s}{ }\PY{n+nv}{x}\PY{p}{;}
\end{BVerbatim}

View File

@ -0,0 +1,8 @@
predicate int_sol(var int: x, var int: xi);
function int: sol(var int: x) =
if is_fixed(x) then fix(x)
else let {
var lb(x)..ub(x): xi;
constraint int_sol(x,xi);
} in xi
endif;

View File

@ -0,0 +1,10 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{k}{predicate}\PY{l+s}{ }\PY{n+nf}{int\PYZus{}sol}\PY{p}{(}\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{x}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{xi}\PY{g+gr}{)}\PY{p}{;}
\PY{k}{function}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nf}{sol}\PY{p}{(}\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{int}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{x}\PY{g+gr}{)}\PY{l+s}{ }\PY{o}{=}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{if}\PY{l+s}{ }\PY{n+nb}{is\PYZus{}fixed}\PY{p}{(}\PY{n+nv}{x}\PY{g+gr}{)}\PY{l+s}{ }\PY{k}{then}\PY{l+s}{ }\PY{n+nb}{fix}\PY{p}{(}\PY{n+nv}{x}\PY{g+gr}{)}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{else}\PY{l+s}{ }\PY{k}{let}\PY{l+s}{ }\PY{p}{\PYZob{}}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{n+nb}{lb}\PY{p}{(}\PY{n+nv}{x}\PY{g+gr}{)}\PY{l+s}{.}\PY{l+s}{.}\PY{n+nb}{ub}\PY{p}{(}\PY{n+nv}{x}\PY{g+gr}{)}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{xi}\PY{p}{;}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{constraint}\PY{l+s}{ }\PY{n+nf}{int\PYZus{}sol}\PY{p}{(}\PY{n+nv}{x}\PY{p}{,}\PY{n+nv}{xi}\PY{g+gr}{)}\PY{p}{;}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{g+gr}{\PYZcb{}}\PY{l+s}{ }\PY{o}{in}\PY{l+s}{ }\PY{n+nv}{xi}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{endif}\PY{p}{;}
\end{BVerbatim}

View File

@ -0,0 +1,6 @@
predicate float_uniform(var float:l, var float: u, var float: r);
function var float: uniform_nbh(var float: l, var float: u) :: impure =
let {
var lb(l)..ub(u): rnd;
constraint float_uniform(l,u,rnd);
} in rnd;

View File

@ -0,0 +1,8 @@
\begin{BVerbatim}[commandchars=\\\{\},numbers=left,firstnumber=1,stepnumber=1,codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}]
\PY{k}{predicate}\PY{l+s}{ }\PY{n+nf}{float\PYZus{}uniform}\PY{p}{(}\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{n+nv}{l}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{u}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{r}\PY{g+gr}{)}\PY{p}{;}
\PY{k}{function}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nf}{uniform\PYZus{}nbh}\PY{p}{(}\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{l}\PY{p}{,}\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{k+kt}{float}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{u}\PY{g+gr}{)}\PY{l+s}{ }\PY{p}{:}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{impure}\PY{l+s}{ }\PY{o}{=}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{let}\PY{l+s}{ }\PY{p}{\PYZob{}}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k+kt}{var}\PY{l+s}{ }\PY{n+nb}{lb}\PY{p}{(}\PY{n+nv}{l}\PY{g+gr}{)}\PY{l+s}{.}\PY{l+s}{.}\PY{n+nb}{ub}\PY{p}{(}\PY{n+nv}{u}\PY{g+gr}{)}\PY{p}{:}\PY{l+s}{ }\PY{n+nv}{rnd}\PY{p}{;}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{k}{constraint}\PY{l+s}{ }\PY{n+nf}{float\PYZus{}uniform}\PY{p}{(}\PY{n+nv}{l}\PY{p}{,}\PY{n+nv}{u}\PY{p}{,}\PY{n+nv}{rnd}\PY{g+gr}{)}\PY{p}{;}
\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{l+s}{ }\PY{g+gr}{\PYZcb{}}\PY{l+s}{ }\PY{o}{in}\PY{l+s}{ }\PY{n+nv}{rnd}\PY{p}{;}
\end{BVerbatim}

View File

@ -36,6 +36,8 @@ ItalicFont=*-Italic,
BoldItalicFont=*-BoldItalic, BoldItalicFont=*-BoldItalic,
] ]
%% Mathmatical font %% Mathmatical font
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{unicode-math} \usepackage{unicode-math}
\setmathfont{GFSNeohellenicMath.otf} \setmathfont{GFSNeohellenicMath.otf}
@ -47,10 +49,19 @@ style=apa,
% Glossary / Acronyms % Glossary / Acronyms
\usepackage[acronym,toc]{glossaries} \usepackage[acronym,toc]{glossaries}
\glsdisablehyper \glsdisablehyper{}
\defglsentryfmt[main]{\ifglsused{\glslabel}{\glsgenentryfmt}{\textit{\glsgenentryfmt}}} \defglsentryfmt[main]{\ifglsused{\glslabel}{\glsgenentryfmt}{\textit{\glsgenentryfmt}}}
\makeglossaries{} \makeglossaries{}
% Graphics
\usepackage{graphicx}
\usepackage{subcaption}
% Example environment
\newcounter{example}[chapter]
\newenvironment{example}[1][]{\refstepcounter{example}\par\medskip \noindent
\textbf{Example~\theexample. #1} \rmfamily}{\hfill \ensuremath{\square}}
% Code formatting % Code formatting
\usepackage{fancyvrb} \usepackage{fancyvrb}
\usepackage{color} \usepackage{color}
@ -61,11 +72,12 @@ type=listing,
float, float,
name=Listing, name=Listing,
counterwithin=chapter, counterwithin=chapter,
atbegin={% listname={List of Source Listings},
atbegin={
\centering \centering
\scriptsize \scriptsize
} }
]{program} ]{listing}
\crefname{listing}{listing}{listings} \crefname{listing}{listing}{listings}
\newcommand{\Vlabel}[1]{\label[line]{#1}\hypertarget{#1}{}} \newcommand{\Vlabel}[1]{\label[line]{#1}\hypertarget{#1}{}}

View File

@ -1,4 +1,6 @@
\newcommand{\eg}{e.g.} \newcommand{\eg}{e.g.}
\newcommand{\flatzinc}{\gls{flatzinc}} \newcommand{\flatzinc}{\gls{flatzinc}}
\newcommand{\microzinc}{\gls{microzinc}}
\newcommand{\minisearch}{\gls{minisearch}} \newcommand{\minisearch}{\gls{minisearch}}
\newcommand{\minizinc}{\gls{minizinc}} \newcommand{\minizinc}{\gls{minizinc}}
\newcommand{\nanozinc}{\gls{nanozinc}}

View File

@ -1,3 +1,70 @@
%************************************************ %************************************************
\chapter{Rewriting Constraint Modelling Languages}\label{ch:rewriting} \chapter{Rewriting Constraint Modelling Languages}\label{ch:rewriting}
%************************************************ %************************************************
\section{Experiments}
We have created a prototype implementation of the architecture presented in the
preceding sections. It consists of a compiler from \minizinc\ to \microzinc, and
an incremental \microzinc\ interpreter producing \nanozinc. The system supports
a significant subset of the full \minizinc\ language; notable features that are
missing are support for set and float variables, option types, and compilation
of model output expressions and annotations. We will release our implementation
under an open-source license and can make it available to the reviewers upon
request.
The implementation is not optimised for performance yet, but was created as a
faithful implementation of the developed concepts, in order to evaluate their
suitability and provide a solid baseline for future improvements. In the
following we present experimental results on basic flattening performance as
well as incremental flattening and solving that demonstrate the efficiency
gains that are possible thanks to the new architecture.
\subsection{Basic Flattening}
As a first experiment, we selected 20 models from the annual \minizinc\
challenge and compiled 5 instances of each model to \flatzinc, using the current
\minizinc\ release version 2.4.3 and the new prototype system. In both cases we
use the standard \minizinc\ library of global constraints (i.e., we decompose
those constraints rather than using solver built-ins, in order to stress-test
the flattening). We measured pure flattening time, i.e., without time required
to parse and typecheck in version 2.4.3, and without time required for
compilation to \microzinc\ in the new system (compilation is usually very fast).
Times are averages of 10 runs.\footnote{All models obtained from
\url{https://github.com/minizinc/minizinc-benchmarks}:
\texttt{\justify{}accap, amaze, city-position, community-detection,
depot-placement, freepizza, groupsplitter, kidney-exchange, median-string,
multi-knapsack, nonogram, nside, problem, rcpsp-wet, road-cons, roster,
stack-cuttingstock, steelmillslab, train, triangular, zephyrus}.}
\Cref{sfig:compareruntime} compares the flattening time for each of the 100
instances. Points below the line indicate that the new system is faster. On
average, the new system achieves a speed-up of $2.3$, with very few instances
not achieving any speedup. In terms of memory performance
(\Cref{sfig:comparemem}), version 2.4.3 can sometimes still outperform the new
prototype. We have identified that the main memory bottlenecks are our currently
unoptimised implementations of CSE lookup tables and argument vectors.
These are very encouraging results, given that we are comparing a largely
unoptimised prototype to a mature piece of software.
\begin{figure}[t]
\centering
\begin{subfigure}[b]{.48\columnwidth}
\centering
\includegraphics[width=\columnwidth]{assets/img/4_compareruntime}
\caption{flattening run time (ms)}
\label{sfig:compareruntime}
\end{subfigure}%
\hspace{0.04\columnwidth}%
\begin{subfigure}[b]{.48\columnwidth}
\centering
\includegraphics[width=\columnwidth]{assets/img/4_comparememory}
\caption{flattening memory (MB)}
\label{sfig:comparemem}
\end{subfigure}
\caption{Performance on flattening 100 MiniZinc Challenge instances.
\minizinc\ 2.4.3 (x-axis) vs new architecture (y-axis), log-log plot. Dots
below the line indicate the new system is better.}
\label{fig:runtime}
\end{figure}

View File

@ -1,6 +1,250 @@
\chapter{Incremental Solving}\label{ch:incremental} \chapter{Incremental Processing}\label{ch:incremental}
%************************************************ %************************************************
Many applications require solving almost the same combinatorial problem
repeatedly, with only slight modifications, thousands of times. For example:
\begin{itemize}
\item Multi-objective problems~\autocite{jones-2002-multi-objective} are often
not supported directly in solvers. They can be solved using a
meta-search approach: find a solution to a (single-objective) problem,
then add more constraints to the original problem and repeat.
\item Large Neighbourhood Search~\autocite{shaw-1998-local-search} is a very
successful meta-heuristic. After finding a (sub-optimal) solution to a
problem, constraints are added to restrict the search in the
neighbourhood of that solution. When a new solution is found, the
constraints are removed, and constraints for a new neighbourhood are
added.
\item In Online Optimisation~\autocite{jaillet-2021-online}, a problem
instance is continuously updated with new data, such as newly available
jobs to be scheduled or customer requests to be processed.
\item Diverse Solution Search~\autocite{hebrard-2005-diverse} aims at
providing a set of solutions that are sufficiently different from each
other, in order to give human decision makers an overview of the
solution space. Diversity can be achieved by repeatedly solving a
problem instance with different objectives.
% TODO(review): \autocite{} below is missing its citation key.
\item In Interactive Search~\autocite{}, a user provides feedback
on decisions made by the solver. The feedback is added back into the
problem, and a new solution is generated. Users may also take back some
earlier feedback and explore different aspects of the problem.
\end{itemize}
All of these examples have in common that a problem instance is solved, new
constraints are added, the resulting instance is solved again, and constraints
may be removed again.
The usage of these methods is not new to \minizinc\ and they have proven to be
very useful \autocite{rendl-2015-minisearch, schrijvers-2013-combinators,
dekker-2018-mzn-lns, schiendorfer-2018-minibrass}. In its most basic form, a
simple scripting language is sufficient to implement these methods, by
repeatedly calling on \minizinc\ to flatten and solve the updated problems.
While the techniques presented so far in this paper should already improve the
performance of these approaches, the overhead of re-flattening an almost
identical model may still prove prohibitive, warranting direct support for
adding and removing constraints in the \minizinc\ abstract machine. In this
section, we will see that our proposed architecture can be made
\emph{incremental}, significantly improving efficiency for these iterative
solving approaches.
\section{Incremental Flattening}
In order to support incremental flattening, the \nanozinc\ interpreter must be
able to process \nanozinc\ calls \emph{added} to an existing \nanozinc\ program,
as well as to \emph{remove} calls from an existing \nanozinc\ program. Adding new
calls is straightforward, since \nanozinc\ is already processed call-by-call.
Removing a call, however, is not so simple. When we remove a call, all effects
the call had on the \nanozinc\ program have to be undone, including results of
propagation, CSE and other simplifications.
\begin{example}\label{ex:6-incremental}
Consider the following \minizinc\ fragment:
\highlightfile{assets/mzn/6_incremental.mzn}
After evaluating the first constraint, the domain of \mzninline{x} is changed to
be less than 10. Evaluating the second constraint causes the domain of
\mzninline{y} to be less than 9. If we now, however, try to remove the first
constraint, it is not just the direct inference on the domain of \mzninline{x}
that has to be undone, but also any further effects of those changes -- in this
case, the changes to the domain of \mzninline{y}.
\end{example}
Due to this complex interaction between calls, we only support the removal of
calls in reverse chronological order, also known as \textit{backtracking}. The
common way of implementing backtracking is using a \textit{trail} data
structure~\autocite{warren-1983-wam}. The trail records all changes to the
\nanozinc\ program:
\begin{itemize}
\item the addition or removal of new variables or constraints,
\item changes made to the domains of variables,
\item additions to the CSE table, and
\item substitutions made due to equality propagation.
\end{itemize}
These changes can be caused by the evaluation of a call, propagation, or CSE.
When a call is removed, the corresponding changes can now be undone by
reversing any action recorded on the trail up to the point where the call was
added.
In order to limit the amount of trailing required, the programmer must create
explicit \textit{choice points} to which the system state can be reset. In
particular, this means that if no choice point was created before the initial
model was flattened, then this flattening can be performed without any
trailing.
\begin{example}\label{ex:6-trail}
Let us look again at the resulting \nanozinc\ code from \Cref{ex:absreif}:
% \highlightfile{assets/mzn/6_abs_reif_result.mzn}
Assume that we added a choice point before posting the constraint
\mzninline{c}. Then the trail stores the \emph{inverse} of all modifications
that were made to the \nanozinc\ as a result of \mzninline{c} (where
$\mapsfrom$ denotes restoring an identifier, and $\lhd$ \texttt{+}/\texttt{-}
respectively denote attaching and detaching constraints):
% \highlightfile{assets/mzn/6_abs_reif_trail.mzn}
To reconstruct the \nanozinc\ program at the choice point, we simply apply
the changes recorded in the trail, in reverse order.
\end{example}
\section{Incremental Solving}
Ideally, the incremental changes made by the interpreter would also be applied
incrementally to the solver. This requires the solver to support both the
dynamic addition and removal of variables and constraints. While some solvers
can support this functionality, most solvers have limitations. The system can
therefore support solvers with different levels of an incremental interface:
\begin{itemize}
\item Using a non-incremental interface, the solver is reinitialised with the
updated \nanozinc\ program every time. In this case, we still get a
performance benefit from the improved flattening time, but not from
incremental solving.
\item Using a \textit{warm-starting} interface, the solver is reinitialised
with the updated program as above, but it is also given a previous solution
to initialise some internal data structures. In particular for mathematical
programming solvers, this can result in dramatic performance gains compared
to ``cold-starting'' the solver every time.
\item Using a fully incremental interface, the solver is instructed to apply
the changes made by the interpreter. In this case, the trail data structure
is used to compute the set of \nanozinc\ changes since the last choice
point.
\end{itemize}
\section{Experiments}
We have created a prototype implementation of the architecture presented in the
preceding sections. It consists of a compiler from \minizinc\ to \microzinc, and
an incremental \microzinc\ interpreter producing \nanozinc. The system supports
a significant subset of the full \minizinc\ language; notable features that are
missing are support for set and float variables, option types, and compilation
of model output expressions and annotations. We will release our implementation
under an open-source license and can make it available to the reviewers upon
request.
The implementation is not optimised for performance yet, but was created as a
faithful implementation of the developed concepts, in order to evaluate their
suitability and provide a solid baseline for future improvements. In the
following we present experimental results on basic flattening performance as
well as incremental flattening and solving that demonstrate the efficiency
gains that are possible thanks to the new architecture.
\subsection{Incremental Flattening and Solving}
To demonstrate the advantage that the incremental processing of \minizinc\
offers, we present a runtime evaluation of two meta-heuristics implemented using
our prototype interpreter. For both meta-heuristics, we evaluate the performance
of fully re-evaluating and solving the instance from scratch, compared to the
fully incremental evaluation and solving. The solving in both tests is performed
by the Gecode solver, version 6.1.2, connected using the fully incremental API.
\paragraph{GBAC}
The Generalised Balanced Academic Curriculum (GBAC) problem
\autocite{chiarandini-2012-gbac} is comprised of scheduling the courses in a
curriculum subject to load limits on the number of courses for each period,
prerequisites for courses, and preferences of teaching periods by teaching
staff. It has been shown~\autocite{dekker-2018-mzn-lns} that Large Neighbourhood
Search (LNS) is a useful meta-heuristic for quickly finding high quality
solutions to this problem. In LNS, once an initial (sub-optimal) solution is
found, constraints are added to the problem that restrict the search space to a
\textit{neighbourhood} of the previous solution. After this neighbourhood has
been explored, the constraints are removed, and constraints for a different
neighbourhood are added. This is repeated until a sufficiently high solution
quality has been reached.
We can model a neighbourhood in \minizinc\ as a predicate that, given the values
of the variables in the previous solution, posts constraints to restrict the
search. The following predicate defines a suitable neighbourhood for the GBAC
problem:
\highlightfile{assets/mzn/6_gbac_neighbourhood.mzn}
When this predicate is called with a previous solution \mzninline{sol}, then
every \mzninline{period_of} variable has an $80\%$ chance to be fixed to its
value in the previous solution. With the remaining $20\%$, the variable is
unconstrained and will be part of the search for a better solution.
In a non-incremental architecture, we would re-flatten the original model plus
the neighbourhood constraint for each iteration of the LNS. In the incremental
\nanozinc\ architecture, we can easily express LNS as a repeated addition and
retraction of the neighbourhood constraints. We implemented both approaches
using the \nanozinc\ prototype, with the results shown in \Cref{fig:gbac}. The
incremental \nanozinc\ translation shows a 12x speedup compared to re-compiling
the model from scratch in each iteration. For this particular problem,
incrementality in the target solver (Gecode) does not lead to a significant
reduction in runtime.
\begin{figure}
\centering
\includegraphics[width=0.5\columnwidth]{assets/img/6_gbac}
\caption{\label{fig:gbac}A run-time performance comparison between incremental
processing (Incr.) and re-evaluation (Redo) of 5 GBAC \minizinc\ instances
in the application of LNS on a 3.4 GHz Quad-Core Intel Core i5 using the
Gecode 6.1.2 solver. Each run consisted of 2500 iterations of applying
neighbourhood predicates. Reported times are averages of 10 runs.}
\end{figure}
\paragraph{Radiation}
Our second experiment is based on a problem of planning cancer radiation therapy
treatment using multi-leaf collimators \autocite{baatar-2011-radiation}. Two
characteristics mark the quality of a solution: the amount of time the patient
is exposed to radiation, and the number of ``shots'' or different angles the
treatment requires. However, the first characteristic is considered more
important than the second. The problem therefore has a lexicographical
objective: a solution is better if it requires a strictly shorter exposure time,
or the same exposure time but a lower number of ``shots''.
\minizinc\ solvers do not support lexicographical objectives directly, but we
can instead repeatedly solve a model instance and add a constraint to ensure
that the lexicographical objective improves. When the solver proves that no
better solution can be found, the last solution is known to be optimal. Given
two variables \mzninline{exposure} and \mzninline{shots}, once we have found a
solution with \mzninline{exposure=e} and \mzninline{shots=s}, we can add the
constraint \mzninline{exposure < e \/ (exposure = e /\ shots < s)}, expressing
the lexicographic ordering, and continue the search. Since each added
lexicographic constraint is strictly stronger than the previous one, we never
have to retract previous constraints.
\begin{figure}
\centering
\includegraphics[width=0.5\columnwidth]{assets/img/6_radiation}
\caption{\label{fig:radiation}A run-time performance comparison between
incremental processing (Incr.) and re-evaluation (Redo) of 9 Radiation
\minizinc\ instances in the application of lexicographic objectives on a 3.4
GHz Quad-Core Intel Core i5 using the Gecode 6.1.2 solver. Each test was run
to optimality and was conducted 20 times to provide an average.}
\end{figure}
As shown in \cref{fig:radiation}, the incremental processing of the added
\mzninline{lex_less} calls is a clear improvement over the re-evaluation of the
whole model. The translation shows a 13x speedup on average, and even the time
spent solving is reduced by 33\%.
\section{Modelling of Neighbourhoods and Meta-heuristics} \section{Modelling of Neighbourhoods and Meta-heuristics}
\label{section:2-modelling-nbhs} \label{section:2-modelling-nbhs}
% %
@ -38,8 +282,8 @@ extension is based on the constructs introduced in
\minisearch\ introduced a \minizinc\ extension that enables modellers to express \minisearch\ introduced a \minizinc\ extension that enables modellers to express
meta-searches inside a \minizinc\ model. A meta-search in \minisearch\ typically meta-searches inside a \minizinc\ model. A meta-search in \minisearch\ typically
solves a given \minizinc\ model, performs some calculations on the solution, adds solves a given \minizinc\ model, performs some calculations on the solution,
new constraints and then solves again. adds new constraints and then solves again.
An LNS definition in \minisearch\ consists of two parts. The first part is a An LNS definition in \minisearch\ consists of two parts. The first part is a
declarative definition of a neighbourhood as a \minizinc\ predicate that posts declarative definition of a neighbourhood as a \minizinc\ predicate that posts
@ -386,17 +630,9 @@ replace the function call with a type specific \mzninline{int_sol} predicate,
which is the constraint that will be executed by the solver. which is the constraint that will be executed by the solver.
% %
\begin{listing}[t] \begin{listing}[t]
\centering \highlightfile{assets/mzn/6_sol_function.mzn}
% \begin{mzn} \caption{\label{lst:6-int-sol} MiniZinc definition of the \mzninline{sol}
% predicate int_sol(var int: x, var int: xi); function for integer variables}
% function int: sol(var int: x) = if is_fixed(x) then fix(x)
% else let { var lb(x)..ub(x): xi;
% constraint int_sol(x,xi);
% } in xi;
% endif;
% \end{mzn}
\caption{\label{lst:6-int-sol} MiniZinc definition of the \mzninline{sol}
function for integer variables}
\end{listing} \end{listing}
% %
To improve the compilation of the model further, we use the declared bounds of To improve the compilation of the model further, we use the declared bounds of
@ -423,19 +659,11 @@ Note that the function accepts variable arguments \mzninline{l} and
as \mzninline{sol}. as \mzninline{sol}.
\begin{listing}[t] \begin{listing}[t]
\centering \highlightfile{assets/mzn/6_uniform_neighbourhood.mzn}
% \begin{mzn} \caption{\label{lst:6-int-rnd} MiniZinc definition of the
% predicate float_uniform(var float:l, var float: u, var float: r); \mzninline{uniform_nbh} function for floats}
% function var float: uniform_nbh(var float: l, var float: u) :: impure =
% let { var lb(l)..ub(u): rnd;
% constraint float_uniform(l,u,rnd):
% } in rnd;
% \end{mzn}
\caption{\label{lst:6-int-rnd} MiniZinc definition of the
\mzninline{uniform_nbh} function for floats}
\end{listing} \end{listing}
\subsection{Solver support for LNS \glsentrytext{flatzinc}} \subsection{Solver support for LNS \glsentrytext{flatzinc}}
We will now show the minimal extensions required from a solver to interpret the We will now show the minimal extensions required from a solver to interpret the