Fix indentation using latexindent

Jip J. Dekker 2021-05-21 14:30:42 +10:00
parent 097d359724
commit fb565cf28c
10 changed files with 1981 additions and 1985 deletions

File diff suppressed because it is too large.


@@ -1,3 +1,5 @@
\usepackage{silence}
\WarningFilter{biblatex}{File 'australian-apa.lbx'}
\hfuzz=1.5pt
\usepackage{fvextra}
\usepackage{csquotes}
@@ -10,36 +12,36 @@
\usepackage{fontspec}
%% Main font
\setmainfont{IBMPlexSerif}[
Path=assets/fonts/IBM-Plex-Serif/,
Extension=.otf,
UprightFont=*-Regular,
BoldFont=*-Bold,
ItalicFont=*-Italic,
BoldItalicFont=*-BoldItalic,
]
%% Title font
\newfontfamily{\ibmplexsanscondensed}{IBMPlexSansCondensed}[
Path=assets/fonts/IBM-Plex-Sans-Condensed/,
Extension=.otf,
UprightFont=*-Regular,
BoldFont=*-Bold,
ItalicFont=*-Italic,
BoldItalicFont=*-BoldItalic,
]
\newfontfamily{\satisfyfont}{Satisfy}[
Path=assets/fonts/Satisfy/,
Extension=.ttf,
UprightFont=*-Regular,
]
%% Monospace font
\setmonofont{IBMPlexMono}[
Ligatures=TeX,
Path=assets/fonts/IBM-Plex-Mono/,
Extension=.otf,
UprightFont=*-Regular,
BoldFont=*-Bold,
ItalicFont=*-Italic,
BoldItalicFont=*-BoldItalic,
]
%% Mathematical font
\usepackage{amsmath}
@@ -48,7 +50,7 @@ BoldItalicFont=*-BoldItalic,
% References
\usepackage[
style=apa,
]{biblatex}
\usepackage[noabbrev]{cleveref}
@@ -115,17 +117,17 @@ style=apa,
attach boxed title to top center,
boxed title style={empty,arc=0pt,outer arc=0pt,boxrule=0pt},
underlay boxed title={
\draw[#1] (title.north west) -- (title.north east) -- +(\tcboxedtitleheight,-\tcboxedtitleheight)
-- (title.south west) -- +(-\tcboxedtitleheight,0)-- cycle;
},
overlay last={
\draw[#1] ([xshift=-80pt]frame.south) -- ++ (160pt,0pt);
\fill[#1] ([xshift=-30pt,yshift=-1pt]frame.south) rectangle +(60pt,2pt);
},
overlay unbroken={
\draw[#1] ([xshift=-80pt]frame.south) -- ++ (160pt,0pt);
\fill[#1] ([xshift=-30pt,yshift=-1pt]frame.south) rectangle +(60pt,2pt);
},
title={Example \theexample{}},
colback=white,
coltitle=black,
@@ -134,9 +136,9 @@ style=apa,
% Code formatting
\usepackage[
chapter,
cachedir=listings,
outputdir=build,
]{minted}

\usemintedstyle{borland}
@@ -147,24 +149,24 @@ outputdir=build,
boxrule=0pt,
outer arc=0pt,
overlay first={
\draw[#1] (frame.north west) -- ++ (0,-5pt);
\draw[#1] (frame.north west) -- ++ (25pt, 0pt);
},
overlay last={
\draw[#1] (frame.south west) -- ++ (0, 5pt);
\draw[#1] (frame.south west) -- ++ (25pt,0pt);
},
overlay unbroken={
\draw[#1] (frame.north west) -- ++ (0,-5pt);
\draw[#1] (frame.north west) -- ++ (25pt, 0pt);
\draw[#1] (frame.south west) -- ++ (0, 5pt);
\draw[#1] (frame.south west) -- ++ (25pt,0pt);
},
colback=white,
colframe=white,
}

\BeforeBeginEnvironment{minted}{\begin{listingbox}}
\AfterEndEnvironment{minted}{\end{listingbox}}

\newcommand{\ptinline}[1]{{\texttt{\small {#1}}}}
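The two environment hooks above wrap every minted listing in the custom listingbox, and \ptinline{} typesets small inline code. A minimal usage sketch, not part of this commit (the minizinc lexer name and the snippet are illustrative):

% hypothetical document-body fragment
\begin{minted}{minizinc}
constraint int_lin_le([1, 2, -1], [x, y, z], 0);
\end{minted}
The same identifier can be referenced inline as \ptinline{int_lin_le}.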


@@ -2,7 +2,7 @@
\chapter{Introduction}\label{ch:introduction}
%************************************************

High-level \cmls{}, like \minizinc{}, were originally designed as a convenient input
language for \gls{cp} solvers \autocite{marriott-1998-clp}. A user would write a
model consisting of a few loops or comprehensions; given a data file for the
parameters, this would be rewritten into a relatively small set of constraints
@@ -14,13 +14,13 @@ acceptable: models (both before and after translation) were small, translation
was fast, and the expected results of translation were obvious. The current
architecture is illustrated in \Cref{sfig:4-oldarch}.

But over time, the uses of high-level \cmls{} have expanded greatly from this
original vision. They are now used to generate input for wildly different solver
technologies: not just \gls{cp}, but also \gls{mip} \autocite{wolsey-1988-mip},
\gls{sat} \autocite{davis-1962-dpll} and \gls{cbls}
\autocite{bjordal-2015-fzn2oscarcbls} solvers. Crucially, the same constraint
model can be used with any of these solvers, which is achieved through the use
of solver-specific libraries of constraint definitions. In \minizinc{}, these
solver libraries are written in the same language.

As such, \minizinc\ turned out to be much more expressive than expected, so more
@@ -35,42 +35,43 @@ However, as they have become more common, these extended uses have revealed
weaknesses of the existing \minizinc\ tool chain. In particular:
\begin{itemize}
\item The \minizinc\ compiler is inefficient. It does a surprisingly large
amount of work for each expression (especially resolving sub-typing
and overloading), which may be repeated many times --- for example,
inside the body of a comprehension. And as models generated for
other solver technologies (particularly \gls{mip}) can be quite
large, the resulting flattening procedure can be intolerably slow.
As the model transformations implemented in \minizinc\ become more
sophisticated, these performance problems are simply magnified.
\item The generated models often contain unnecessary constraints. During
the transformation, functional expressions are replaced with
constraints. But this breaks the functional dependencies: if the
original expression later becomes redundant (due to model
simplifications), \minizinc\ may fail to detect that the constraint
can be removed.
\item Monolithic flattening is wasteful. When \minizinc\ is used for
multi-shot solving, there is typically a large base model common to
all sub-problems, and a small set of constraints which are added or
removed in each iteration. But with the existing \minizinc\
architecture, the whole model must be fully re-flattened each time.
Not only does this repeat all the work done to flatten the base
model, meaning a large (sometimes dominant) portion of runtime is
simply spent flattening the core model over and over again; it also
prevents \emph{the solver} from carrying over anything it learnt
from one problem to the next, closely related, problem.
\end{itemize}

In this thesis, we revisit the rewriting of high-level \cmls\ into solver-level
constraint models and describe an architecture that allows us to:
\begin{itemize}
\item easily integrate a range of \textbf{optimisation and simplification}
techniques,
\item effectively \textbf{detect and eliminate dead code} introduced by
functional definitions, and
\item support \textbf{incremental flattening and solving}, and better
integration with solvers providing native incremental features.
\end{itemize}


@@ -1,5 +1,5 @@
%************************************************
\chapter{Background}\label{ch:background}
%************************************************

A goal shared between all programming languages is to provide a certain level of
@@ -34,12 +34,6 @@ solution, these models can generally be given to a dedicated solving program, or
\solver{} for short, that can find a solution that fits the requirements of the
model.

\begin{example}%
\label{ex:back-knapsack}
@@ -78,6 +72,12 @@ model.
\end{example}

\begin{listing}
\pyfile{assets/py/2_dyn_knapsack.py}
\caption{\label{lst:2-dyn-knapsack} A Python program that solves a 0-1 knapsack
problem using dynamic programming}
\end{listing}

In the remainder of this chapter we first, in \cref{sec:back-minizinc},
introduce \minizinc\ as the leading \cml\ used within this thesis. In
\cref{sec:back-solving} we discuss how \gls{cp} can be used to solve a
@@ -445,14 +445,13 @@ resulting definition. There are three main purposes for \glspl{let}:
multiplication constraint \mzninline{pred_int_times}.
\end{enumerate}

An important detail in flattening \glspl{let} is that any \variables{} that are
introduced might need to be renamed in the resulting solver level model.
Different from top-level definitions, the \variables{} declared in \glspl{let}
can be flattened multiple times when used in loops, function definitions (that
are called multiple times), and \gls{array} \glspl{comprehension}. In these
cases the flattener must assign any \variables{} in the \gls{let} a new name and
use this name in any subsequent definitions and in the resulting expression.
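As a hedged illustration of the renaming issue described above (not taken from the thesis; the identifiers n, a and the bounds are made up), consider a let expression inside a forall loop, whose local variable y must be given a fresh name for every instantiation:

\begin{mzn}
int: n = 3;
array [1..n] of var int: a;
constraint forall (i in 1..n) (
  let { var 0..10: y } in (a[i] = y * 2)
);
\end{mzn}
Flattening this loop introduces three copies of y, each of which would receive a fresh name in the resulting solver-level model.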

\subsection{Reification}%
\label{subsec:back-reification}
@@ -460,8 +459,8 @@ definitions and in the resulting expression.
With the rich expression language in \minizinc{}, \constraints{} can consist of
complex expressions that do not translate to a single constraint at the
\solver{} level. Instead, different parts of a complex expression will be
translated into different \constraints{}. Not all of these \constraints{} will
be globally enforced by the solver. \constraints{} stemming from sub-expressions
will typically be \emph{reified} into a Boolean variable. \Gls{reification}
means that a variable \mzninline{b} is constrained to be true if and only if a
corresponding constraint \mzninline{c(...)} holds.
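As a hedged illustration of the reification just described (not from the thesis; x and y are assumed to be integer decision variables declared elsewhere), a comparison occurring inside a larger expression would be reified roughly as follows.

\begin{mzn}
var bool: b;
constraint b <-> (x < y);  % b is true if and only if x < y holds
\end{mzn}
The Boolean \mzninline{b} can then be used freely in the surrounding expression.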
@@ -509,8 +508,8 @@ Some expressions in the \cmls\ do not always have a well-defined result.
the constraint should be trivially true.
\end{example}

Part of the semantics of a \cml{} is the choice as to how to treat these
partial functions. Examples of such expressions in \minizinc\ are:
\begin{itemize}
\item Division (or modulus) when the divisor is zero:


@@ -22,12 +22,12 @@ larger.
In this chapter, we revisit the rewriting of high-level \cmls\ into solver-level
constraint models. We describe a new \textbf{systematic view of the execution of
\minizinc{}} and build on this to propose a new tool chain. We show how this
tool chain allows us to:
\begin{itemize}
\item efficiently rewrite high-level constraint models with \textbf{minimal
overhead},
\item easily integrate a range of \textbf{optimisation and simplification}
techniques,
\item and effectively \textbf{detect and eliminate dead code} introduced by
@@ -100,20 +100,20 @@ and where expressions have \mzninline{par} type.
<literal> ::= <constant> | <ident>

<exp> ::= <literal>
\alt <ident>"(" <exp> ["," <exp>]* ")"
\alt "let" "{" <item>* "}" "in" <literal>
\alt "if" <exp> "then" <exp> "else" <exp> "endif"
\alt "[" <exp> "|" <gen-exp> "where" <exp> "]"
\alt <ident>"[" <literal> "]"

<item> ::= <param> [ "=" <exp> ]";"
\alt "tuple(" <type-inst> ["," <type-inst>]* "):" <ident> = "(" <exp> ["," <exp>]* ")";
\alt "(" <param> ["," <param>]* ")" "=" <exp>";"
\alt "constraint" <exp>";"

<param> ::= <type-inst>":" <ident>

<gen-exp> ::= <ident> "in" <exp> ["," <ident> "in" <exp> ]*
\end{grammar}
\caption{\label{fig:4-uzn-syntax}Syntax of \microzinc{}.}
\end{figure}
@@ -127,18 +127,18 @@ explained in detail in \cref{sec:4-nanozinc}, for now you can ignore them).
\begin{figure}
\begin{grammar}
<nzn> ::= <nzn-item>*

<nzn-item> ::= <nzn-def> | <nzn-con>

<nzn-con> ::= "constraint" <ident>"(" <literal> ["," <literal>]* ")"";"

<nzn-var> ::= "var" <nzn-dom> ":" <ident> ";" <nzn-bind>*

<nzn-dom> ::= <constant> ".." <constant> | <constant>
\alt "bool" | "int" | "float" | "set of int"

<nzn-bind> ::= "↳" <nzn-con>
\end{grammar}
\caption{\label{fig:4-nzn-syntax}Syntax of \nanozinc{}.}
\end{figure}
@@ -154,9 +154,9 @@ The translation from \minizinc\ to \microzinc{} involves the following steps:
expressions like \mzninline{x+y} by function calls
\mzninline{int_plus(x,y)}. As mentioned above, more complex rules for
rewriting conditionals with variables, such as \mzninline{if x then A
else B endif} and \mzninline{A[x]} where \mzninline{x} is a variable,
are also replaced by function calls (\eg\ \mzninline{if_then_else([x,
true], [A, B])} and \mzninline{element(a, x)} respectively).
\item Replace optional variables with non-optional forms. As shown by Mears et
al., optional variable types can be represented using a variable of the
@@ -189,8 +189,8 @@ The translation from \minizinc\ to \microzinc{} involves the following steps:
is only defined when \mzninline{x in index_set(a)}. A total version to
be used in \microzinc\ could look like\footnote{For brevity's sake, we
will still use \minizinc\ operators to write the \microzinc\
definition.}:

\begin{mzn}
function tuple(var int, var bool): element_safe(array of int: a, var int: x) =
@@ -409,25 +409,25 @@ identifiers are fresh (\ie\ not used in the rest of the \nanozinc\ program), by
suitable alpha renaming.

\begin{figure*}
\centering
\begin{prooftree}
\hypo{\texttt{function} ~t\texttt{:}~\ptinline{F(\(p_1, \ldots{}, p_k\)) = E;} \in{} \Prog{} \text{~where the~} p_i \text{~are fresh}}
\infer[no rule]1{\Sem{\(\ptinline{E}_{[p_i \mapsto a_i, \forall{} 1 \leq{} i \leq{} k]}\)}{\Prog, \Env} \Rightarrow{} \tuple{v, \Env'}}
\infer1[(Call)]{\Sem{F(\(a_1, \ldots, a_k\))}{\Prog, \Env} \Rightarrow{} \tuple{v, \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\ptinline{F} \in \text{Builtins}}
\infer1[(CallBuiltin)]{\Sem{F(\(a_1, \ldots, a_k\))}{\Prog, \Env} \Rightarrow{} \tuple{\ensuremath{\mathit{eval}(\ptinline{F}(a_1,\ldots, a_k))}, \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\texttt{predicate}~\ptinline{F(\(p_1, \ldots, p_k\));} \in \Prog}
\infer1[(CallPredicate)]{\Sem{F(\(a_1, \ldots, a_k\))}{\Prog, \Env} \Rightarrow{}
\tuple{\ensuremath{\texttt{constraint}~\ptinline{F}(a_1, \ldots, a_k), \Env}}}
\end{prooftree}
\caption{\label{fig:4-rewrite-to-nzn-calls} Rewriting rules for partial evaluation
of \microzinc\ calls to \nanozinc{}.}
\end{figure*}
The rules in \cref{fig:4-rewrite-to-nzn-calls} handle function calls. The first
@@ -449,47 +449,47 @@ considered primitives, and as such simply need to be transferred into the
\nanozinc\ program.
\begin{figure*}
\centering
\begin{prooftree}
\hypo{\Sem{\(\mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env, \emptyset} \Rightarrow \tuple{x, \Env'}}
\infer1[(Let)]{\Sem{let \{ \(\mathbf{I}\) \} in \(\mathbf{X}\)}{\Prog, \Env} \Rightarrow\ \tuple{x, \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(\mathbf{X}\)}{\Prog, \{ t : x \} \cup{} \Env} \Rightarrow \tuple{x, \{ t: x \} \cup{} \Env'}}
\infer1[(Item0)]{\Sem{\(\epsilon{} \mid{} \mathbf{X}\)}{\Prog, \{ t: x \}\cup{}\Env, \Ctx} \Rightarrow{} \tuple{x, \{ t: x \texttt{~↳~} \Ctx{} \} \cup{} \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(\mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env\ \cup\ \{t : x\}, \Ctx} \Rightarrow \tuple{x, \Env'}}
\infer1[(ItemT)]{\Sem{\(t\texttt{:}~x\texttt{;} \mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env, \Ctx} \Rightarrow{} \tuple{x, \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(E\)}{\Prog, \Env} \Rightarrow \tuple{v, \Env'}}
\hypo{\Sem{\(\mathbf{I}_{[x \mapsto v]} \mid{} \mathbf{X}_{[x \mapsto v]}\)}{\Prog, \Env', \Ctx} \Rightarrow\ \tuple{x, \Env''}}
\infer2[(ItemTE)]{\Sem{\(t\texttt{:}~x = E\texttt{;} \mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env, \Ctx} \Rightarrow\ \tuple{x, \Env''}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(E_{i}\)}{\Prog,\Env^{i-1}} \Rightarrow \tuple{v_{i}, \Env^{i}}, \forall{} 1 \leq{} i \leq{} k}
\infer[no rule]1{\Sem{\(\mathbf{I}_{[x_{\langle{}i\rangle} \mapsto v_{i}, \forall{} 1 \leq{} i \leq{} k]} \mid{} \mathbf{X}_{[x_{\langle{}i\rangle} \mapsto v_{i}, \forall{} 1 \leq{} i \leq{} k]}\)}{\Prog, \Env^{k}, \Ctx} \Rightarrow\ \tuple{x, \Env'}}
\infer1[(ItemTupC)]{\Sem{\( \texttt{tuple(}t_{1}, \ldots, t_{k}\texttt{):}~x = \left(E_{1}, \ldots, E_{k}\right)\)}{\Prog, \Env^{0}, \Ctx} \Rightarrow{} \tuple{x, \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(E\)}{\Prog, \Env} \Rightarrow \tuple{\left(v_{\langle{}1\rangle}, \ldots, v_{\langle{}k\rangle}\right), \Env'}}
\infer[no rule]1{\Sem{\(\mathbf{I}_{[x_{i} \mapsto v_{\langle{}i\rangle}, \forall{} 1 \leq{} i \leq{} k]} \mid{} \mathbf{X}_{[x_{i} \mapsto v_{\langle{}i\rangle}, \forall{} 1 \leq{} i \leq{} k]}\)}{\Prog, \Env', \Ctx} \Rightarrow\ \tuple{x, \Env''}}
\infer1[(ItemTupD)]{\Sem{\(\left(t_{1}\texttt{:}~x_{1}, \ldots, t_{k}\texttt{:}~x_{k}\right) = E\texttt{;} \mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env, \Ctx} \Rightarrow\ \tuple{x, \Env''}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(C\)}{\Prog, \Env} \Rightarrow \tuple{c, \Env'}}
\hypo{\Sem{\(\mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env', \{c\}\cup\Ctx} \Rightarrow \tuple{x, \Env''}}
\infer2[(ItemC)]{\Sem{\(\mbox{constraint}~C\texttt{;} \mathbf{I} \mid{} \mathbf{X}\)}{\Prog, \Env, \Ctx} \Rightarrow\ \tuple{x, \Env''}}
\end{prooftree}
\caption{\label{fig:4-rewrite-to-nzn-let} Rewriting rules for partial evaluation
of \microzinc\ let expressions to \nanozinc{}.}
\end{figure*}
The rules in \cref{fig:4-rewrite-to-nzn-let} consider let expressions. Starting
@@ -506,54 +506,54 @@ to mark the \(i^{\text{th}}\) member of the tuple \(x\) in our substitution
notation.
\begin{figure*}
\centering
\begin{prooftree}
\hypo{x \in \langle \text{ident} \rangle}
\hypo{\{t: x \texttt{~↳~} \phi\ \} \in \Env}
\infer2[(IdX)]{\Sem{\(x\)}{\Prog, \Env} \Rightarrow{} \tuple{x, \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(x\)}{\Prog, \Env} \Rightarrow \tuple{[x_{1}, \ldots, x_{n}], \Env'}}
\hypo{\Sem{\(i\)}{\Prog, \Env'} \Rightarrow \tuple{v, \Env''}}
\hypo{v \in [1 \ldots{} n]}
\infer3[(Access)]{\Sem{\(x\texttt{[}i\texttt{]}\)}{\Prog, \Env} \Rightarrow{} \tuple{x_{v}, \Env''}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{c \text{~constant}}
\infer1[(Const)]{\Sem{\(c\)}{\Prog, \Env} \Rightarrow{} \tuple{c, \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(C\)}{\Prog, \Env} \Rightarrow \tuple{\ptinline{true}, \_}}
\infer1[(If\(_T\))]{\Sem{if \(C\) then \(E_t\) else \(E_e\) endif}{\Prog, \Env} \Rightarrow{} \Sem{\(E_t\)}{\Prog, \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(C\)}{\Prog, \Env} \Rightarrow \tuple{\ptinline{false}, \_}}
\infer1[(If\(_F\))]{\Sem{if \(C\) then \(E_t\) else \(E_e\) endif}{\Prog, \Env} \Rightarrow{} \Sem{\(E_e\)}{\Prog, \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{G}{\Prog,\Env} \Rightarrow \tuple{ \ptinline{true}, \_}}
\hypo{\Sem{\(E\)}{\Prog, \Env} \Rightarrow \tuple {v, \Env'}}
\infer2[(WhereT)]{\Sem{\([E ~|~ \mbox{where~} G]\)}{\Prog, \Env} \Rightarrow{} \tuple{[v], \Env'}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{G}{\Prog,\Env} \Rightarrow \tuple{\ptinline{false}, \_}}
\infer1[(WhereF)]{\Sem{\([E ~|~ \mbox{where~} G]\)}{\Prog, \Env} \Rightarrow{} \tuple{[], \Env}}
\end{prooftree} \\
\bigskip
\begin{prooftree}
\hypo{\Sem{\(\mathit{PE}\)}{\Prog,\Env} \Rightarrow \tuple{S, \_}}
\infer[no rule]1{\Sem{\([E ~|~ \mathit{GE} \mbox{~where~} G]\)}{\Prog, \Env_{[x \mapsto{} v]}} \Rightarrow{} \tuple{A_v, \Env_{[x \mapsto{} v]} \cup{} \Env{}_{v}}, \forall{} v \in{} S }
\infer1[(ListG)]{\Sem{\([E~|~ x \mbox{~in~} \mathit{PE}, \mathit{GE} \mbox{~where~} G]\)}{\Prog, \Env} \Rightarrow{}
\tuple{\mbox{concat} [ A_v~|~v \in{} S ], \Env{} \cup{} \bigcup{}_{v \in{} S} \Env{}_{v}}}
\end{prooftree}
\caption{\label{fig:4-rewrite-to-nzn-other} Rewriting rules for partial evaluation
of other \microzinc\ expressions to \nanozinc{}.}
\end{figure*}
Finally, the rules in \cref{fig:4-rewrite-to-nzn-other} handle the evaluation of
@@ -1028,7 +1028,7 @@ Complex \minizinc\ expression can sometimes result in the creation of many new
variables that represent intermediate results. This is in particular true for
linear and boolean equations that are generally written using \minizinc\
operators. For example, the evaluation of the linear constraint \mzninline{x +
2*y <= z} will result in the following \nanozinc:

\begin{nzn}
var int: x;
@@ -1045,7 +1045,7 @@ These \nanozinc\ definitions are correct, but, at least for \gls{mip} solvers,
the existence of the intermediate variables is likely to have a negative impact
on the \gls{solver}'s performance. These \glspl{solver} would likely perform
better had they received the linear constraint \mzninline{int_lin_le([1,2,-1],
[x,y,z], 0)} directly. Since many solvers support linear constraints, it is
often an additional burden to have intermediate values that have to be given a
value in the solution.
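For concreteness, the aggregated form mentioned above would reach such a solver as a single linear constraint. A sketch in the document's nzn notation (assuming x, y and z are the integer variables from the earlier fragment):

\begin{nzn}
var int: x;
var int: y;
var int: z;
constraint int_lin_le([1, 2, -1], [x, y, z], 0);
\end{nzn}
No intermediate variable is needed, which is exactly the point made above.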
@@ -1145,22 +1145,22 @@ unoptimised prototype to a mature piece of software.
]{assets/table/rew_functional.csv}\rewFuncData

\begin{tikzpicture}
\begin{axis}[
xbar,
reverse legend,
legend style={at={(0.82,0.8)},
anchor=west,legend columns=1},
xlabel={Run-time (ms)},
symbolic y coords={{A(3,6)},{Fib(23)},{Tak(18,12,6)}},
ytick=data,
% cycle list name=exotic,
nodes near coords,
nodes near coords align={horizontal},
width = \columnwidth,
height = 8cm,
enlarge y limits={0.3},
enlarge x limits={0.03},
xmin = 0,
xmax = 130,
]
\addplot[fill=cb1, postaction={pattern=grid}] table [y={test}, x={MiniZinc}]{\rewFuncData};
\addplot[fill=cb2, postaction={pattern=north east lines}] table [y={test}, x={Python}]{\rewFuncData};


@@ -6,29 +6,29 @@ In this chapter we study the usage of \gls{half-reif}. When a constraint \mzninl
\begin{itemize}
\item Flattening with half reification can naturally produce the relational
semantics when flattening partial functions in positive contexts.
\item Half reified constraints add no burden to the solver writer; if they
have a propagator for constraint \mzninline{pred(...)} then they can
straightforwardly construct a half reified propagator for \mzninline{b
-> pred(...)}.
\item Half reified constraints \mzninline{b -> pred(...)} can implement fully
reified constraints without any loss of propagation strength (assuming
reified constraints are negatable).
\item Flattening with half reification can produce more efficient propagation
when flattening complex constraints.
\item Flattening with half reification can produce smaller linear models when
used with a mixed integer programming solver.
\end{itemize}

\section{Propagation and Half Reification}%
\label{sec:half-propagation}

\begin{itemize}
\item custom implemented half-reified propagations.
\item benefits
\item downside
\item experimental results
\end{itemize}

A propagation engine gains certain advantages from half-reification, but also
@@ -40,10 +40,10 @@ the propagator, and hence make its propagation faster.
When full reification is applicable (where we are not using half reified
predicates) an alternative to half reification is to implement full reification
\mzninline{x <-> pred(...)} by two half reified propagators \mzninline{x ->
pred(...)}, \mzninline{y \half \neg pred(...)}, \mzninline{x <-> not y}. This
does not lose propagation strength. For Booleans appearing in a positive context
we can make the propagator \mzninline{y -> not pred(...)} run at the lowest
priority, since it will never cause failure. Similarly in negative contexts we
can make the propagator \mzninline{b -> pred(...)} run at the lowest priority.
This means that Boolean variables are still fixed at the same time, but there is
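A hedged sketch of the decomposition described above (not from the thesis; p is a placeholder predicate, a a placeholder argument, and x and y are assumed Boolean variables), written as a MiniZinc fragment:

\begin{mzn}
constraint x -> p(a);      % half reification of p
constraint y -> not p(a);  % half reification of the negation of p
constraint x <-> not y;    % together these behave like x <-> p(a)
\end{mzn}
In a positive context the second propagator can be scheduled at the lowest priority, as noted above.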
@@ -56,30 +56,30 @@ less overhead.
\label{sec:half-context}

\begin{tabular}{ccc}
\(
\begin{array}{lcl}
\changepos \rootc & = & \posc \\
\changepos \posc & = & \posc \\
\changepos \negc & = & \negc \\
\changepos \mixc & = & \mixc
\end{array}
\)
& ~ &
\(
\begin{array}{lcl}
\changeneg \rootc & = & \negc \\
\changeneg \posc & = & \negc \\
\changeneg \negc & = & \posc \\
\changeneg \mixc & = & \mixc
\end{array}
\)
\end{tabular}

\begin{itemize}
\item What (again) are the contexts?
\item Why are they important
\item When can we use half-reification
\item How does the analysis work?
\end{itemize}
@@ -87,7 +87,7 @@ less overhead.
\label{sec:half-flattening}

\begin{itemize}
\item cse
\item chain compression
\end{itemize}


@@ -43,8 +43,8 @@ may be removed again.
The usage of these methods is not new to \gls{constraint-modelling}, and they
have proven to be very useful \autocite{schrijvers-2013-combinators,
rendl-2015-minisearch, schiendorfer-2018-minibrass, ek-2020-online,
ingmar-2020-diverse}. In its most basic form, a simple scripting language is
sufficient to implement these methods, by repeatedly calling on the
\gls{constraint-modelling} infrastructure to compile and solve the adjusted
constraint models. While improvements of the compilation of constraint models,
@@ -566,9 +566,9 @@ The definition of these new functions follows the same pattern as for
\mzninline{sol}, \mzninline{status}, and \mzninline{last_val}. The MiniZinc
definition of the \mzninline{uniform_nbh} function is shown in
\Cref{lst:6-int-rnd}.\footnote{Random number functions need to be marked as
\mzninline{::impure} for the compiler not to apply \gls{cse}
\autocite{stuckey-2013-functions} if they are called multiple times with the
same arguments.} Note that the function accepts variable arguments \mzninline{l}
and \mzninline{u}, so that it can be used in combination with other functions,
such as \mzninline{sol}.
@@ -693,19 +693,19 @@ the call had on the \nanozinc\ program have to be undone, including results of
propagation, \gls{cse} and other simplifications.

\begin{example}\label{ex:6-incremental}
Consider the following \minizinc\ fragment:

\begin{mzn}
constraint x < 10;
constraint y < x;
\end{mzn}

After evaluating the first constraint, the domain of \mzninline{x} is changed to
be less than 10. Evaluating the second constraint causes the domain of
\mzninline{y} to be less than 9. If we now, however, try to remove the first
constraint, it is not just the direct inference on the domain of \mzninline{x}
that has to be undone, but also any further effects of those changes --- in this
case, the changes to the domain of \mzninline{y}.
\end{example}

Due to this complex interaction between calls, we only support the removal of
@@ -779,18 +779,18 @@ therefore support solvers with different levels of an incremental interface:
\begin{itemize}
\item Using a non-incremental interface, the solver is reinitialised with the
updated \nanozinc\ program every time. In this case, we still get a
performance benefit from the improved flattening time, but not from
incremental solving.
\item Using a \textit{warm-starting} interface, the solver is reinitialised
with the updated program as above, but it is also given a previous solution
to initialise some internal data structures. In particular for mathematical
programming solvers, this can result in dramatic performance gains compared
to ``cold-starting'' the solver every time.
\item Using a fully incremental interface, the solver is instructed to apply
the changes made by the interpreter. In this case, the trail data structure
is used to compute the set of \nanozinc\ changes since the last choice
point.
\end{itemize}
@@ -955,8 +955,8 @@ development version of Chuffed; and \chuffedMzn{}, Chuffed performing \gls{lns}
with \flatzinc\ neighbourhoods. These experiments illustrate that the \gls{lns}
implementations indeed perform well compared to the standard
solvers.\footnote{Our implementations are available at
\texttt{\justify{}https://github.com/Dekker1/\{libminizinc,gecode,chuffed\}} on
branches containing the keyword \texttt{on\_restart}.} All experiments were run
on a single core of an Intel Core i5 CPU @ 3.4 GHz with 4 cores and 16 GB RAM
running macOS High Sierra. \gls{lns} benchmarks are repeated with 10 different
random seeds and the average is shown. The overall timeout for each run is 120
@@ -999,9 +999,9 @@ periods, which is done via the variables \mzninline{period_of} in the model.
one period and frees all courses that are assigned to it.

\begin{table}
\centering
\input{assets/table/6_gbac}
\caption{\label{tab:6-gbac}\texttt{gbac} benchmarks}
\end{table}

The results for \texttt{gbac} in \cref{tab:6-gbac} show that the overhead
@@ -1030,9 +1030,9 @@ in the incumbent solution. These orders can then be freely reassigned to any
other slab.

\begin{table}
\centering
\input{assets/table/6_steelmillslab}
\caption{\label{tab:6-steelmillslab}\texttt{steelmillslab} benchmarks}
\end{table}

For this problem a solution with zero wastage is always optimal. The use of
@@ -1064,9 +1064,9 @@ structured neighbourhood for this model. It randomly selects a time interval of
one-tenth the length of the planning horizon and frees all tasks starting in
that time interval, which allows a reshuffling of these tasks.

\begin{table}[b]
\centering
\input{assets/table/6_rcpsp-wet}
\caption{\label{tab:6-rcpsp-wet}\texttt{rcpsp-wet} benchmarks.}
\end{table}

\cref{tab:6-rcpsp-wet} shows that \gecodeReplay\ and \gecodeMzn\ perform almost


@@ -25,16 +25,16 @@ the rules for identifiers and annotations.
% Items
<item> ::= <include-item>
\alt <var-decl-item>
\alt <enum-item>
\alt <assign-item>
\alt <constraint-item>
\alt <solve-item>
\alt <output-item>
\alt <predicate-item>
\alt <test-item>
\alt <function-item>
\alt <annotation-item>

<ti-expr-and-id> ::= <type-inst> ":" <ident>
@@ -64,20 +64,20 @@ the rules for identifiers and annotations.
\alt "solve" <annotations> "minimize" <expr>
\alt "solve" <annotations> "maximize" <expr>

% Output items
<output-item> ::= "output" <expr>

% Annotation items
<annotation-item> ::= "annotation" <ident> <params>

% Predicate, test and function items
<predicate-item> ::= "predicate" <operation-item-tail>

<test-item> ::= "test" <operation-item-tail>

<function-item> ::= "function" <type-inst> ":" <operation-item-tail>

<operation-item-tail> ::= <ident> <params> <annotations> [ "=" <expr> ]

<params> ::= [ ( <ti-expr-and-id> "," ... ) ]
@@ -236,8 +236,8 @@ the rules for identifiers and annotations.
% Annotation literals
<ann-literal> ::= <ident> [ "(" <expr> "," ... ")" ]

% If-then-else expressions
<if-then-else-expr> ::= "if" <expr> "then" <expr> [ "elseif" <expr> "then" <expr> ]* "else" <expr> "endif"

% Call expressions
<call-expr> ::= <ident-or-quoted-op> [ "(" <expr> "," ... ")" ]
@@ -259,7 +259,7 @@ the rules for identifiers and annotations.
% % Miscellaneous
% Identifiers
<ident> ::= \(regexp\left(\texttt{[A-Za-z][A-Za-z0-9\_]*}\right)\)
\alt \(regexp\left(\verb|'[^'\xa\xd\x0]*'|\right)\)

% Identifiers and quoted operators
<ident-or-quoted-op> ::= <ident>

File diff suppressed because it is too large.


@@ -94,7 +94,7 @@ following publication:
\printbibliography[heading=none]
\end{refsection}

\chapter{Acknowledgements}

\mainmatter{}