Grammar check for the glossary

Jip J. Dekker 2021-07-24 12:07:12 +10:00
parent b6a54d384b
commit e25e0d46c4
11 changed files with 101 additions and 101 deletions

View File

@@ -120,7 +120,7 @@
bibsource = {dblp computer science bibliography, https://dblp.org},
}
@inproceedings{belov-2016-linearisation,
@inproceedings{belov-2016-linearization,
author = {Gleb Belov and Peter J. Stuckey and Guido Tack and Mark Wallace},
editor = {Michel Rueher},
title = {Improved Linearization of Constraint Programming Models},

View File

@@ -23,7 +23,7 @@
description={
A mapping from \glspl{prb-par} or \glspl{variable} to values.
An assignment is referred to as \gls{partial} when it maps only a subset of the \glspl{prb-par} and \glspl{variable} of a \gls{model}.
A \gls{parameter-assignment} is an \gls{partial} \gls{assignment} that maps only the \glspl{prb-par}, a \gls{variable-assignment} maps only the \glspl{variable}.
A \gls{parameter-assignment} is a \gls{partial} \gls{assignment} that maps only the \glspl{prb-par}, while a \gls{variable-assignment} maps only the \glspl{variable}.
A complete \gls{parameter-assignment} maps all \glspl{prb-par} of a \gls{model}, while a complete \gls{variable-assignment} maps all \glspl{variable} of a \gls{model}
},
}
@@ -52,13 +52,13 @@
name={auxiliary variable},
description={
A \gls{variable} not explicitly defined in a \gls{model}, but rather introduced in the \gls{rewriting} of an \gls{instance}.
Auxiliary \glspl{variable} are generally introduced to redefine an existing \gls{variable} using \glspl{variable} of a different type, to represent an sub-expression, or to connect \glspl{constraint} in a \gls{decomp}
Auxiliary \glspl{variable} are generally introduced to redefine an existing \gls{variable} using \glspl{variable} of a different type, to represent a sub-expression, or to connect \glspl{constraint} in a \gls{decomp}
},
}
\newglossaryentry{backtrack}{
name={backtrack},
description={A \gls{solver} is said to backtrack when it revisits a search decision (\eg{} an value assumed during search). Notably the action of backtracking generally involves invalidating all effects of the revisited and subsequent search decisions},
description={A \gls{solver} is said to backtrack when it revisits a search decision (\eg{} a value assumed during search). Notably, the action of backtracking generally involves invalidating all effects of the revisited and subsequent search decisions},
}
\newglossaryentry{binding}{
@@ -68,7 +68,7 @@
\newglossaryentry{bnb}{
name={branch and bound},
description={A search method to find an \gls{opt-sol}. After an initial \gls{sol} is found, the search continuously looks for \glspl{sol} for which the \gls{objective} will return a better value than the incumbent \gls{sol}. If it is proven that no such \gls{sol} exists, then the final incumbent \gls{sol} is an \gls{opt-sol}},
description={A search method to find a \gls{opt-sol}. After an initial \gls{sol} is found, the search continuously looks for \glspl{sol} for which the \gls{objective} will return a better value than the incumbent \gls{sol}. If it is proven that no such \gls{sol} exists, then the final incumbent \gls{sol} is a \gls{opt-sol}},
}
\newglossaryentry{byte-code}{
@@ -76,8 +76,8 @@
description={A set of instructions designed to be efficiently executed by an \gls{interpreter}. Distinct from other computer languages, a byte code has a very compact representation, \eg{} using only numbers, and cannot be directly read by humans},
}
\newglossaryentry{booleanisation}{
name={Booleanisation},
\newglossaryentry{booleanization}{
name={Booleanization},
description={The process of \gls{rewriting} a \gls{model} to a \gls{sat} or \gls{maxsat} problem},
}
@@ -109,7 +109,7 @@
\newglossaryentry{comprehension}{
name={comprehension},
description={A syntactic construct available in \minizinc{} to construct sets and \glspl{array}. It constructs the elements based on an expression initialised by a \gls{generator}},
description={A syntactic construct available in \minizinc{} to construct sets and \glspl{array}. It constructs the elements based on an expression initialized by a \gls{generator}},
}
\newglossaryentry{conditional}{
@@ -119,7 +119,7 @@
\newglossaryentry{constraint}{
name={constraint},
description={A formalised rule of a problem. Constraints are generally expressed in term of Boolean logic},
description={A formalized rule of a problem. Constraints are generally expressed in terms of Boolean logic},
}
\newglossaryentry{cml}{
@@ -190,7 +190,7 @@
name={decision problem},
description={
A problem which can be defined as making a set of decisions under a certain set of rules.
Decisions problems can be formalised as \glspl{model}.
Decision problems can be formalized as \glspl{model}.
},
}
@@ -198,7 +198,7 @@
name={decomposition},
description={
A formulation of a \gls{constraint} in terms of other \glspl{constraint} in order to reach \gls{native} \glspl{constraint}.
Note that the new \glspl{constraint} can represent the same decisions using a different \glspl{variable}, possibly of different types
Note that the new \glspl{constraint} can represent the same decisions using different \glspl{variable}, possibly of different types
},
}
@@ -249,14 +249,14 @@
\newglossaryentry{generator}{
name={generator},
description={An },
description={An expression in \gls{minizinc} that evaluates to a sequence of elements. These elements are then generally used to instantiate an identifier in a \gls{comprehension} expression.},
}
\newglossaryentry{global}{
name={global constraint},
description={
A common \gls{constraint} pattern.
These patterns can generally span any number of \glspl{variable}.
These patterns can generally extend to any number of \glspl{variable}.
For example, the well-known global constraint \( AllDifferent(\ldots) \) requires all its arguments to take a different value.
Global constraints are usually not \gls{native} to a \gls{solver}.
Instead, the \gls{rewriting} process can enforce the global constraint using a \gls{decomp}
@@ -314,8 +314,8 @@
},
}
\newglossaryentry{linearisation}{
name={linearisation},
\newglossaryentry{linearization}{
name={linearization},
description={The process of \gls{rewriting} a \gls{model} to a mixed integer program},
}
@@ -332,16 +332,16 @@
\newglossaryentry{gls-lns}{
name={large neighbourhood search},
description={
A \gls{meta-optimisation} strategy that repeatedly reduces the \gls{search-space} by applying different \glspl{neighbourhood} often based on a previous \gls{sol}. Large neighbourhood search is an well-known method to quickly improve a \gls{sol}. Large neighbourhood search is not guaranteed to find the \gls{opt-sol} and if it does, then it will be unable to prove that it did
A \gls{meta-optimization} strategy that repeatedly reduces the \gls{search-space} by applying different \glspl{neighbourhood}, often based on a previous \gls{sol}. Large neighbourhood search is a well-known method to quickly improve a \gls{sol}. Large neighbourhood search is not guaranteed to find the \gls{opt-sol} and, if it does, it will be unable to prove that it did
},
}
\newglossaryentry{meta-optimisation}{
name={meta-optimisation},
\newglossaryentry{meta-optimization}{
name={meta-optimization},
description={
Methods used to improve on the general techniques used by \glspl{solver} to look for \glspl{sol}, such as \gls{bnb}.
Meta-optimization can be used for a myriad of reasons, such as to quickly improve on an existing \gls{sol}, to find a set of diverse \glspl{sol}, or to use complex or multiple \glspl{objective}.
Although meta-optimisation methods can be implemented by adjusting the search method in a \gls{solver}, they are often emulated through the adjustment of \glspl{slv-mod} based on the results from the \gls{solver}
Although meta-optimization methods can be implemented by adjusting the search method in a \gls{solver}, they are often emulated through the adjustment of \glspl{slv-mod} based on the results from the \gls{solver}
},
}
@@ -353,16 +353,16 @@
\newglossaryentry{minisearch}{
name={Mini\-Search},
description={
\textcite{rendl-2015-minisearch} introduced MiniSearch as a way to describe \gls{meta-optimisation} methods from within a \gls{minizinc} model.
The specified \gls{meta-optimisation} method would be performed either by repeatedly calling a \gls{solver} with an updated \gls{slv-mod} or using an \gls{solver} extended with a MiniSearch interface
\textcite{rendl-2015-minisearch} introduced MiniSearch as a way to describe \gls{meta-optimization} methods from within a \gls{minizinc} model.
The specified \gls{meta-optimization} method would be performed either by repeatedly calling a \gls{solver} with an updated \gls{slv-mod} or by using a \gls{solver} extended with a MiniSearch interface
},
}
\newglossaryentry{model}{
name={constraint model},
description={
A formalisation of a \gls{dec-prb} or an \gls{opt-prb}.
It is defined in terms of formalised decision, \glspl{variable} of different kinds (\eg{} Boolean, integers, or even sets), and \glspl{constraint}, Boolean logic formulas which are forced to hold in any \gls{sol}, and, in case of an \gls{opt-prb}, an \gls{objective}.
A formalization of a \gls{dec-prb} or an \gls{opt-prb}.
It is defined in terms of formalized decisions, \glspl{variable} of different kinds (\eg{} Boolean, integers, or even sets), and \glspl{constraint}, Boolean logic formulas which are forced to hold in any \gls{sol}, and, in the case of an \gls{opt-prb}, an \gls{objective}.
Any external data required to formulate the \glspl{constraint} is said to be the \glspl{prb-par}.
The combination of a constraint model and \gls{assignment}s for its \glspl{prb-par} is said to be an \gls{instance} of the constraint model
},
@@ -422,16 +422,16 @@
}
\newglossaryentry{gls-opl}{
name={OPL:\ The optimisation modelling language},
description={A \gls{cml} aimed at combining the powers of mathematical programming and \gls{cp}. The OPL is excels at the modelling of scheduling problems. See \cref{sub:back-opl} },
name={OPL:\ The optimization modelling language},
description={A \gls{cml} aimed at combining the powers of mathematical programming and \gls{cp}. OPL excels at the modelling of scheduling problems. See \cref{sub:back-opl}},
}
\newglossaryentry{opt-prb}{
name={optimisation problem},
name={optimization problem},
description={
A decision problem with an additional \gls{objective}.
It can, likewise, be formalised as a \gls{model}.
If the problem has multiple \glspl{sol}, then this function can be used to asses the quality on the \gls{sol}.
It can, likewise, be formalized as a \gls{model}.
If the problem has multiple \glspl{sol}, then this function can be used to assess the quality of the \gls{sol}.
For such a problem, an \gls{opt-sol} is a \gls{sol} with the highest quality
},
}
@@ -455,9 +455,9 @@
}
\newglossaryentry{rbmo}{
name={restart based meta-optimisation},
name={restart based meta-optimization},
description={
A technique to apply \gls{meta-optimisation} methods within the \gls{solver}.
A technique to apply \gls{meta-optimization} methods within the \gls{solver}.
The \gls{solver} is extended with \gls{native} \glspl{constraint} that set \glspl{variable} to a \gls{fixed} value on every \gls{restart}.
See \cref{sec:inc-modelling,sec:inc-solver-extension}
},
@@ -620,7 +620,7 @@
\newglossaryentry{trs}{
name={term rewriting system},
description={A computational model that expresses computation through the application of rewriting rules. A rewriting rule replaces the \glspl{term} on its left hand side with the \glspl{term} on its right hand side},
description={A computational model that expresses computation through the application of rewriting rules. A rewriting rule replaces the \glspl{term} on its left-hand side with the \glspl{term} on its right-hand side},
}
\newglossaryentry{unsat}{
@@ -631,7 +631,7 @@
\newglossaryentry{variable}{
name={decision variable},
description={
A formalised decision that is yet to be made.
A formalized decision that is yet to be made.
When searching for a \gls{sol}, a decision variable is said to have a certain \gls{domain}, which contains the values that the decision variable can still take.
If at any point the \gls{domain} is reduced to a single value, then the decision variable is said to be \gls{fixed}.
If, however, a decision variable has an empty \gls{domain}, then there is no value it can take that is consistent with the \glspl{constraint}

View File

@@ -17,7 +17,7 @@ Distinctively, \minizinc{}'s \gls{rewriting} process is founded on its funct
It generates \glspl{slv-mod} through the application of increasingly complex \minizinc{} functions from \solver{}-specific libraries.
Consequently, the efficiency of the functional evaluation of the language can be a limiting factor.
For many applications, the current \minizinc{} implementation now requires a significant, and sometimes prohibitive, amount of time to rewrite \instances{}.
This problem is exacerbated by the emerging use of \gls{meta-optimisation} algorithms, which require solving a sequence of closely related \instances{}.
This problem is exacerbated by the emerging use of \gls{meta-optimization} algorithms, which require solving a sequence of closely related \instances{}.
In this thesis we revisit the \gls{rewriting} of functional \cmls{} into \glspl{slv-mod}.
We design and evaluate an architecture for \cmls{} that can accommodate its modern uses.
@@ -28,7 +28,7 @@ In addition, we incorporate new analysis techniques to avoid the use of \glspl{r
Crucially, the architecture is designed to incorporate incremental \constraint{} modelling in two ways.
Primarily, the \gls{rewriting} process is fully incremental: changes made to the \instance{} through a provided interface require minimal additional \gls{rewriting} effort.
Moreover, we introduce \gls{rbmo}, a way to specify \gls{meta-optimisation} algorithms directly in \minizinc{}.
Moreover, we introduce \gls{rbmo}, a way to specify \gls{meta-optimization} algorithms directly in \minizinc{}.
These specifications are executed by a normal \minizinc{} \solver{}, requiring only a slight extension of its capabilities.
Together, the functionality of this architecture helps make \cmls{} a more powerful and attractive approach to solving real-world problems.

View File

@@ -159,14 +159,14 @@ We design and evaluate an architecture for \cmls{} that can accommodate the mode
It is an expressive language that, in addition to user-defined functions, gives modellers access to advanced features, such as many types of \variables{}, annotations, and an extensive standard library of \constraints{}.
All of these can be used with all \solver{} targets.
Because of the popularity and maturity of the language, there is a large suite of \minizinc{} models available that can be used as benchmarks.
The language has also been used in multiple studies as a host for meta-optimisation techniques \autocite{ingmar-2020-diverse,ek-2020-online}.
The language has also been used in multiple studies as a host for meta-optimization techniques \autocite{ingmar-2020-diverse,ek-2020-online}.
A \minizinc{} model generally consists of a few loops or \glspl{comprehension}; for a \gls{cp} solver, this would be rewritten into a relatively small set of \constraints{} which would be fed whole into the solver.
The existing process for translating \minizinc{} into a \gls{slv-mod} is a somewhat ad-hoc, (mostly) single-pass, recursive unrolling procedure, and many aspects (such as call overloading) are resolved dynamically.
In its original application, this was acceptable: models (both before and after \gls{rewriting}) were small.
Now, the language is also used to target low-level \solvers{}, such as \gls{sat} and \gls{mip}.
For them, the encoding of the same \minizinc{} \instance{} results in a much larger \gls{slv-mod}.
Additionally, \minizinc{} is used in progressively more \gls{meta-optimisation} toolchains.
Additionally, \minizinc{} is used in progressively more \gls{meta-optimization} toolchains.
To a great extent, this is testament to the effectiveness of the language.
However, as they have become more common, these extended uses have revealed weaknesses of the existing \minizinc{} tool chain.
In particular:
@@ -190,7 +190,7 @@ In particular:
And, it also does not consider \gls{half-reif}, a cheaper alternative to \gls{reif} that is often applicable.
\item Monolithic \gls{rewriting} is wasteful.
When \minizinc{} is used as part of a meta-optimisation toolchain, there is typically a large base model common to all sub-problems, and a small set of \constraints{} which are added or removed in each iteration.
When \minizinc{} is used as part of a meta-optimization toolchain, there is typically a large base model common to all sub-problems, and a small set of \constraints{} which are added or removed in each iteration.
But with the existing \minizinc{} architecture, the whole model must be rewritten each time.
Many use cases involve generating a base model, then repeatedly adding or removing a few \constraints{} before re-solving.
In the current tool chain, the whole model must be fully rewritten to a \gls{slv-mod} each time.
@@ -222,7 +222,7 @@ In the design of this architecture, we analyse the \gls{rewriting} process from
We first determine the foundation for the system: an execution model for the basic \gls{rewriting} system.
To ensure the quality of the produced \glspl{slv-mod}, we extend the system with many well-known simplification techniques.
In addition, we experiment with several new approaches to optimize the resulting \glspl{slv-mod}, including the use of \gls{half-reif}.
Crucially, we ensure that the resulting system is fully incremental, and we experiment with several \gls{meta-optimisation} applications to evaluate this.
Crucially, we ensure that the resulting system is fully incremental, and we experiment with several \gls{meta-optimization} applications to evaluate this.
Overall, this thesis makes the following contributions.
@@ -270,7 +270,7 @@ We conclude this chapter by analysing the impact of the usage of \gls{half-reif}
\emph{\Cref{ch:incremental}} focuses on the development of incremental methods for \cmls{}.
We first present a novel technique that eliminates the need for the incremental \gls{rewriting}.
Instead, it integrates a meta-optimisation specification, written in the \minizinc{} language, into the \gls{slv-mod}.
Instead, it integrates a meta-optimization specification, written in the \minizinc{} language, into the \gls{slv-mod}.
Then, we describe a technique to optimize the \gls{rewriting} process for incremental changes to an \instance{}.
This method ensures that no work is done for parts of the \instance{} that remain unchanged.
We conclude this chapter by testing the performance and computational overhead of these two techniques.

View File

@@ -749,11 +749,11 @@ Modern \solvers{}, such as \gls{cbc} \autocite{forrest-2020-cbc}, \gls{cplex} \a
These \solvers{} are therefore prime targets to solve \minizinc{} \instances{}.
To solve an \instance{} of a \cmodel{}, it can be rewritten into a mixed integer program.
This process is known as \gls{linearisation}.
This process is known as \gls{linearization}.
It does, however, come with its challenges.
Many \minizinc{} models contain \constraints{} that are not linear (in-)equations.
The translation of a single such \constraint{} can introduce many linear \constraints{} and \glspl{avar}.
For example, when a \constraint{} reasons about the value that a variable takes, the \gls{linearisation} process introduces indicator variables.
For example, when a \constraint{} reasons about the value that a variable takes, the \gls{linearization} process introduces indicator variables.
The indicator variables \(y_{i}\) are \glspl{avar} that, for a \variable{} \(x\), take the value 1 if \(x = i\) and 0 otherwise.
\Constraints{} reasoning about the value of \(x\) are then rewritten as linear \constraints{} using the \variables{} \(y_{i}\).
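As a brief sketch (assuming a \variable{} \(x\) with \gls{domain} \(\{1, \ldots, n\}\)), the indicator variables are linked to \(x\) by
\[ \sum_{i=1}^{n} y_{i} = 1, \qquad x = \sum_{i=1}^{n} i \cdot y_{i}, \qquad y_{i} \in \{0, 1\}, \]
after which a \constraint{} such as \(x \neq k\) becomes the single linear \constraint{} \(y_{k} = 0\).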
@@ -804,7 +804,7 @@ Modern day \gls{sat} solvers, like Clasp \autocite{gebser-2012-clasp}, Kissat \a
Many real-world problems modelled using \cmls{} directly correspond to \gls{sat}.
However, even problems that contain \variables{} with types other than Boolean can still be encoded as a \gls{sat} problem.
This process is known as \gls{booleanisation}.
This process is known as \gls{booleanization}.
Depending on the problem, using a \gls{sat} \solver{} to solve the encoded problem can still be the most efficient way to solve the problem.
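As a brief sketch (not the encoding used in the thesis), an integer \variable{} \(x \in \{1, 2, 3\}\) can be represented by Boolean \variables{} \(x_{i}\), one per value, constrained by the clauses
\[ (x_{1} \lor x_{2} \lor x_{3}) \land \bigwedge_{1 \leq i < j \leq 3} (\lnot x_{i} \lor \lnot x_{j}), \]
which force exactly one \(x_{i}\) to be \emph{true}; \constraints{} on \(x\) are then expressed as further clauses over the \(x_{i}\).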
\begin{example}
@@ -1108,9 +1108,9 @@ At its core, however, the \gls{rewriting} of \gls{ess-prime} works very differen
\Gls{ess-prime} is rewritten using a more traditional \compiler{}.
For all concepts in the language, the \compiler{} intrinsically knows how to rewrite them for its target \solver{}.
Recently, \textcite{gokberk-2020-ess-incr} have also presented Savile Row as the basis of a \gls{meta-optimisation} toolchain.
Recently, \textcite{gokberk-2020-ess-incr} have also presented Savile Row as the basis of a \gls{meta-optimization} toolchain.
The authors extend Savile Row to bridge the gap between the incremental assumption interface of \gls{sat} \solvers{} and the modelling language and show how this helps to efficiently solve pattern mining and optimisation problems.
Consequently, this research reiterates the use of \gls{meta-optimisation} algorithms in \cmls{} and the need for incremental bridging between the modelling language and the \solver{}.
Consequently, this research reiterates the use of \gls{meta-optimization} algorithms in \cmls{} and the need for incremental bridging between the modelling language and the \solver{}.
\section{Term Rewriting}%
\label{sec:back-term}

View File

@@ -614,7 +614,7 @@ Propagating the effect of one \constraint{} can thus trigger the propagation of
The \nanozinc{} \gls{interpreter} applies \glspl{propagator} for the most important \constraints{}, such as linear and Boolean \constraints{}, which can lead to significant reductions in the size of the generated \gls{slv-mod}.
It has been shown previously that applying full \gls{propagation} during compilation can be beneficial \autocite{leo-2015-multipass}.
This works particularly well when the target solver requires complicated reformulations, such as linearisation for \gls{mip} \solvers{} \autocite{belov-2016-linearisation}.
This works particularly well when the target solver requires complicated reformulations, such as linearization for \gls{mip} \solvers{} \autocite{belov-2016-linearization}.
Returning to the example above, \gls{propagation} (and other model simplifications) can result in equality \constraints{} \mzninline{x=y} being introduced.
For certain types of target \solvers{}, in particular those based on \gls{cp}, equality \constraints{} can lead to exponentially larger search trees if not detected and simplified.
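As a minimal illustration (not taken from the thesis) of one \constraint{}'s \gls{propagation} waking another during \gls{rewriting}:
\begin{mzn}
var 1..10: x;
var 1..10: y;
constraint x + y <= 4; % propagation shrinks both domains to 1..3
constraint y != x;     % this propagator is woken once the domains change
solve satisfy;
\end{mzn}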

View File

@@ -183,11 +183,11 @@ If, however, a \gls{reif} is replaced by a \gls{half-reif}, then only one of the
constraint i >= 5 - 5 * b; % not b -> i >= 5
\end{mzn}
Instead, if we could determine that the \constraint{} could be \gls{half-reified}, then the \gls{linearisation} could be simplified to only the first \constraint{}.
Instead, if we could determine that the \constraint{} could be \gls{half-reified}, then the \gls{linearization} could be simplified to only the first \constraint{}.
\end{example}
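For reference, a hedged sketch of the two implications of a full \gls{reif} \mzninline{b <-> i <= 4}, assuming \mzninline{i} ranges over \mzninline{0..9}:
\begin{mzn}
constraint i <= 4 + 5 * (1 - b); %     b -> i <= 4
constraint i >= 5 - 5 * b;       % not b -> i >= 5
\end{mzn}
A \gls{half-reif} keeps only the first of these two \constraints{}.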
The same principle can be applied all throughout the \gls{linearisation} process.
Ultimately, \minizinc{}'s \gls{linearisation} library rewrites most \glspl{reif} in terms of implied less than or equal to \constraints{}.
The same principle can be applied throughout the \gls{linearization} process.
Ultimately, \minizinc{}'s \gls{linearization} library rewrites most \glspl{reif} in terms of implied less than or equal to \constraints{}.
For each of these \glspl{reif}, its replacement by a \gls{half-reif} can remove half of the implications required for the \gls{reif}.
For \gls{sat} solvers, a \gls{decomp} for a \gls{half-reif} can be created from its regular \gls{decomp}.
@@ -931,7 +931,7 @@ In the 32-4-8 group, we even see that usage of the \gls{propagator} allows us to
The combination of context analysis and the introduction of \glspl{half-reif} allows us to evaluate the usage of \gls{half-reif} on a larger scale.
In our second experiment we assess its effects on the \gls{rewriting} and solving of the \instances{} of the 2019 and 2020 \minizinc{} challenge \autocite{stuckey-2010-challenge,stuckey-2014-challenge}.
These experiments are conducted using the \gls{gecode} \solver{}, which has propagators for \glspl{half-reif} of many basic \constraints{}, and \minizinc{}'s \gls{linearisation} and \gls{booleanisation} libraries, which has been adapted to use \gls{half-reif} as earlier described.
These experiments are conducted using the \gls{gecode} \solver{}, which has propagators for \glspl{half-reif} of many basic \constraints{}, and \minizinc{}'s \gls{linearization} and \gls{booleanization} libraries, which have been adapted to use \gls{half-reif} as described earlier.
The \minizinc{} instances are rewritten using the \minizinc{} 2.5.5 \compiler{}, which can enable and disable the usage of \gls{half-reif}.
The solving of the linearised \instances{} is tested using the \gls{cbc} and \gls{cplex} \gls{mip} \solvers{}.
The solving of the Booleanised \instances{} is tested using the \gls{openwbo} \solver{}.
@@ -943,11 +943,11 @@ is
\end{subtable}
\begin{subtable}[b]{\linewidth}
\input{assets/table/half_flat_linear}
\caption{\label{subtab:half-flat-lin}\Gls{linearisation} library}
\caption{\label{subtab:half-flat-lin}\Gls{linearization} library}
\end{subtable}
\begin{subtable}[b]{\linewidth}
\input{assets/table/half_flat_sat}
\caption{\label{subtab:half-flat-bool}\Gls{booleanisation} library}
\caption{\label{subtab:half-flat-bool}\Gls{booleanization} library}
\end{subtable}
\caption{\label{tab:half-rewrite} Cumulative statistics of \gls{rewriting} all \minizinc{} \instances{} from \minizinc{} challenge 2019 \& 2020 (200 \instances{}).}
\end{table}
@@ -971,17 +971,17 @@ This replacement happens mostly 1-for-1; the difference between the number of \g
In many models, no implications can be removed, but for some problems an implication is removed for every \gls{half-reif} that is introduced.
Finally, the overhead of the introduction of \gls{half-reif} and the newly introduced optimisation techniques is minimal.
The \Cref{subtab:half-flat-lin} paints an equally positive picture for the usage of \glspl{half-reif} for linearisation.
\Cref{subtab:half-flat-lin} paints an equally positive picture for the usage of \glspl{half-reif} for linearization.
Since both \glspl{reif} and \glspl{half-reif} are decomposed during the \gls{rewriting} process, the usage of \gls{half-reif} is able to remove almost 7.5\% of the overall \constraints{}.
The ratio of \glspl{reif} that are replaced with \glspl{half-reif} is not as high as for \gls{gecode}.
This is caused by the fact that the linearisation process requires full \gls{reif} in the decomposition of many \glspl{global}.
This is caused by the fact that the linearization process requires full \gls{reif} in the decomposition of many \glspl{global}.
Similar to \gls{gecode}, the number of implications that is removed is dependent on the problem.
Lastly, the \gls{rewriting} time slightly increases for the linearisation process.
Lastly, the \gls{rewriting} time slightly increases for the linearization process.
Since there are many more \constraints{}, the introduced optimisation mechanisms have a slightly higher overhead.
Finally, statistics for the \gls{rewriting} of the \instances{} for a \gls{maxsat} \solver{} are shown in \cref{subtab:half-flat-bool}.
Unlike linearisation, the usage of \gls{half-reif} does not significantly reduces number of \constraints{} and, although still significant, the number of \glspl{reif} is reduced by only slightly over ten percent.
We also see that that the \gls{booleanisation} library is explicitly defined in terms of \glspl{half-reif}.
Unlike linearization, the usage of \gls{half-reif} does not significantly reduce the number of \constraints{} and, although still significant, the number of \glspl{reif} is reduced by only slightly over ten percent.
We also see that the \gls{booleanization} library is explicitly defined in terms of \glspl{half-reif}.
Some \constraints{} manually introduce \mzninline{_imp} calls as part of their definition.
Even when we do not automatically introduce \glspl{half-reif}, they are still introduced by other \constraints{}.
Furthermore, the usage of \gls{chain-compression} does not seem to have any effect.

View File

@@ -9,16 +9,16 @@
\section{Modelling of Restart Based Meta-Optimisation}\label{sec:inc-modelling}
This section introduces a \minizinc{} extension that enables modellers to define \gls{meta-optimisation} algorithms in \minizinc{}.
This section introduces a \minizinc{} extension that enables modellers to define \gls{meta-optimization} algorithms in \minizinc{}.
This extension is based on the construct introduced in \minisearch{} \autocite{rendl-2015-minisearch}, as summarised below.
\subsection{Meta-Optimisation in MiniSearch}\label{sec:inc-minisearch}
\minisearch{} introduced a \minizinc{} extension that enables modellers to express \gls{meta-optimisation} inside a \minizinc{} model.
A \gls{meta-optimisation} algorithm in \minisearch{} typically solves a given \minizinc{} model, performs some calculations on the solution, adds new \constraints{} and then solves again.
\minisearch{} introduced a \minizinc{} extension that enables modellers to express \gls{meta-optimization} inside a \minizinc{} model.
A \gls{meta-optimization} algorithm in \minisearch{} typically solves a given \minizinc{} model, performs some calculations on the solution, adds new \constraints{} and then solves again.
Most \gls{meta-optimisation} definitions in \minisearch{} consist of two parts.
The first part is a declarative definition of any restriction to the \gls{search-space} that the \gls{meta-optimisation} algorithm can apply, called a \gls{neighbourhood}.
Most \gls{meta-optimization} definitions in \minisearch{} consist of two parts.
The first part is a declarative definition of any restriction to the \gls{search-space} that the \gls{meta-optimization} algorithm can apply, called a \gls{neighbourhood}.
In \minisearch{} these definitions can make use of the following function.
\begin{mzn}
@@ -41,10 +41,10 @@ For example, \mzninline{uniform_neighbourhood(x, 0.2)} would result in each vari
\begin{listing}
\mznfile{assets/listing/inc_lns_minisearch.mzn}
\caption{\label{lst:inc-lns-minisearch} A simple \gls{lns} \gls{meta-optimisation} implemented in \minisearch{}}
\caption{\label{lst:inc-lns-minisearch} A simple \gls{lns} \gls{meta-optimization} implemented in \minisearch{}}
\end{listing}
The second part of a \minisearch{} \gls{meta-optimisation} is the \gls{meta-optimisation} algorithm itself.
The second part of a \minisearch{} \gls{meta-optimization} is the \gls{meta-optimization} algorithm itself.
\Cref{lst:inc-lns-minisearch} shows a basic \minisearch{} implementation of a basic \gls{lns}, called \mzninline{lns}.
It performs a fixed number of iterations, each invoking the \gls{neighbourhood} predicate \mzninline{uniform_neighbourhood} in a fresh scope.
This means that the \constraints{} only affect the current loop iteration.
@@ -52,22 +52,22 @@ It then searches for a solution (\mzninline{minimize_bab}) with a given timeout.
If the search does return a new solution, then it commits to that solution and it becomes available to the \mzninline{sol} function in subsequent iterations.
The \mzninline{lns} function also posts the \constraint{} \mzninline{obj < sol(obj)}, ensuring the objective value in the next iteration is strictly better than that of the current solution.
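For reference, a hedged sketch of what such a \gls{neighbourhood} predicate could look like (the parameter names are illustrative; \mzninline{sol} behaves as described above, and \mzninline{uniform} is \minizinc{}'s random number builtin):
\begin{mzn}
predicate uniform_neighbourhood(array [int] of var int: x, float: destr_rate) =
  forall (i in index_set(x)) (
    % keep each variable at its incumbent value with probability 1 - destr_rate
    if uniform(0.0, 1.0) > destr_rate then x[i] = sol(x[i]) else true endif
  );
\end{mzn}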
Although \minisearch{} enables the modeller to express \glspl{neighbourhood} in a declarative way, the definition of the \gls{meta-optimisation} algorithm is rather unintuitive and difficult to debug, leading to unwieldy code for defining even simple algorithm.
Although \minisearch{} enables the modeller to express \glspl{neighbourhood} in a declarative way, the definition of the \gls{meta-optimization} algorithm is rather unintuitive and difficult to debug, leading to unwieldy code for defining even simple algorithms.
Furthermore, the \minisearch{} implementation requires either a close integration of the backend \solver{} into the \minisearch{} system, or driving the solver through the regular text-file-based \flatzinc{} interface.
This leads to a significant communication overhead.
To address these two issues, we propose to keep modelling \glspl{neighbourhood} as predicates, but define \gls{meta-optimisation} algorithm from an imperative perspective.
To address these two issues, we propose to keep modelling \glspl{neighbourhood} as predicates, but define \gls{meta-optimization} algorithms from an imperative perspective.
We define a few additional \minizinc{} \glspl{annotation} and functions that
\begin{itemize}
\item allow us to express important aspects of the meta-optimisation algorithm in a more convenient way,
\item allow us to express important aspects of the meta-optimization algorithm in a more convenient way,
\item and enable a simple \gls{rewriting} scheme that requires no additional communication with, and only small, simple extensions of, the target \solver{}.
\end{itemize}
\subsection{Restart Annotation}
Instead of the complex \minisearch{} definitions, we propose to add support for \glspl{meta-optimisation} that are purely based on the notion of \glspl{restart}.
Instead of the complex \minisearch{} definitions, we propose to add support for \glspl{meta-optimization} that are purely based on the notion of \glspl{restart}.
A \gls{restart} happens when a solver abandons its current search efforts, returns to its initial \gls{search-space}, and begins a new exploration.
Many \gls{cp} \solvers{} already provide support for controlling their restarting behaviour.
They can periodically restart after a certain number of nodes, or restart for every \gls{sol}.
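For instance, \minizinc{}'s standard restart \glspl{annotation} already expose this behaviour to the modeller (the values used here are illustrative):
\begin{mzn}
% restart whenever the node limit is reached; the limit grows
% geometrically from 250 nodes by a factor of 1.5
solve ::restart_geometric(1.5, 250) minimize obj;
\end{mzn}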
@@ -103,7 +103,7 @@ Finally, the \mzninline{restart_limit} stops the search after a fixed number of
\subsection{Advanced Meta-Optimisation Algorithms}
Although using just a \gls{restart} \glspl{annotation} by themselves allows us to run the basic \gls{lns}, more advanced \gls{meta-optimisation} algorithms will require more than reapplying the same \gls{neighbourhood} time after time.
Although using just \gls{restart} \glspl{annotation} by themselves allows us to run the basic \gls{lns}, more advanced \gls{meta-optimization} algorithms will require more than reapplying the same \gls{neighbourhood} time after time.
It is, for example, often beneficial to use several \gls{neighbourhood} definitions within the same \instance{}.
Different \glspl{neighbourhood} may be able to improve different aspects of a \gls{sol} at different phases of the search.
Adaptive \gls{lns} \autocite{ropke-2006-adaptive, pisinger-2007-heuristic} is a variant of \gls{lns} that keeps track of the \glspl{neighbourhood} that led to improvements and favours them for future iterations.
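A hedged sketch of the simplest such strategy, a round-robin choice between two \glspl{neighbourhood} (the predicate names are illustrative; \mzninline{last_val} is the state-access function introduced below, and initialisation on the very first \gls{restart} is elided):
\begin{mzn}
predicate round_robin() =
  let { var 0..1: k } in    % which neighbourhood to apply
  k = 1 - last_val(k)       % alternate on every restart
  /\ if k = 0 then neighbourhood_a() else neighbourhood_b() endif;
\end{mzn}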
@@ -115,7 +115,7 @@ In this way, the \minisearch{} strategy can store values to be used in later ite
We use the ``\solver{} state'' instead.
All values are stored in normal \variables{}.
We then access them using two simple functions that reveal the solver state of the previous \gls{restart}.
This approach is sufficient for expressing many \gls{meta-optimisation} algorithms, and its implementation is much simpler.
This approach is sufficient for expressing many \gls{meta-optimization} algorithms, and its implementation is much simpler.
\paragraph{State access and initialisation}
@@ -194,7 +194,7 @@ For adaptive \gls{lns}, a simple strategy is to change the size of the \gls{neig
\subsection{Optimisation strategies}
The \gls{meta-optimisation} algorithms we have seen so far rely on the default behaviour of \minizinc{} \solvers{} to use \gls{bnb} for optimisation: when a new \gls{sol} is found, the \solver{} adds a \constraint{} to the remainder of the search to only accept better \glspl{sol}, as defined by the \gls{objective} in the \mzninline{minimize} or \mzninline{maximize} clause of the \mzninline{solve} item.
The \gls{meta-optimization} algorithms we have seen so far rely on the default behaviour of \minizinc{} \solvers{} to use \gls{bnb} for optimisation: when a new \gls{sol} is found, the \solver{} adds a \constraint{} to the remainder of the search to only accept better \glspl{sol}, as defined by the \gls{objective} in the \mzninline{minimize} or \mzninline{maximize} clause of the \mzninline{solve} item.
When combined with \glspl{restart} and \gls{lns}, this is equivalent to a simple hill-climbing meta-heuristic.
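Concretely, a hedged sketch of this combination (\mzninline{my_neighbourhood} stands for any nullary \gls{neighbourhood} predicate; the annotation values are illustrative):
\begin{mzn}
% apply the neighbourhood on every restart; branch and bound itself
% already forces each new solution to improve the objective, so the
% combination behaves like hill climbing
solve ::restart_constant(500)
      ::on_restart("my_neighbourhood")
      minimize obj;
\end{mzn}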
We can use the constructs introduced above to implement alternative meta-heuristics such as simulated annealing.
@@ -214,7 +214,7 @@ A more interesting example is a simulated annealing strategy.
When using this strategy, the \glspl{sol} that the \solver{} finds are no longer required to steadily improve in quality.
Instead, we ask the \solver{} to find a \gls{sol} that is a significant improvement over the previous \gls{sol}.
Over time, we decrease the amount by which we require the \gls{sol} to improve, until we are just looking for any improvement.
This \gls{meta-optimisation} can help improve the qualities of \gls{sol} quickly and thereby reaching the \gls{opt-sol} quicker.
This \gls{meta-optimization} can help improve the quality of \glspl{sol} quickly and thereby reach the \gls{opt-sol} sooner.
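In symbols, a hedged sketch of the acceptance criterion (the initial temperature \(T_{0} > 0\) and cooling rate \(0 < \alpha < 1\) are illustrative parameters): after the \(k\)-th \gls{restart}, the \solver{} must find a \gls{sol} with
\[ \mathit{obj} \leq \mathit{sol}(\mathit{obj}) - \lceil T_{k} \rceil, \qquad T_{k} = \alpha \cdot T_{k-1}, \]
so the required improvement shrinks geometrically over the course of the search.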
\Cref{lst:inc-sim-ann} shows how this strategy is also easy to express using \gls{rbmo}.
\begin{listing}
@@ -254,7 +254,7 @@ Our search is thus complete.
There is not always a clear order of importance for different \glspl{objective} in a problem.
We can instead look for a number of diverse \glspl{sol} and allow the user to pick the most acceptable options.
The predicate in \cref{lst:inc-pareto} shows a \gls{meta-optimisation} for the Pareto optimality of a pair of \glspl{objective}.
The predicate in \cref{lst:inc-pareto} shows a \gls{meta-optimization} for the Pareto optimality of a pair of \glspl{objective}.
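A hedged sketch of the \constraints{} posted before each \gls{restart}, assuming two minimized \glspl{objective} \(o_{1}\) and \(o_{2}\) and the set \(S\) of \glspl{sol} found so far:
\[ \bigwedge_{s \in S} \left( o_{1} < s(o_{1}) \lor o_{2} < s(o_{2}) \right), \]
which forbids any new \gls{sol} that is Pareto dominated by an earlier one.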
\begin{listing}
\mznfile{assets/listing/inc_pareto.mzn}
@@ -274,7 +274,7 @@ Before each \gls{restart} we add \constraints{} removing Pareto dominated \glspl
The \glspl{neighbourhood} defined in the previous section can be executed with \minisearch{} by adding support for the \mzninline{status} and \mzninline{last_val} built-in functions, and by defining the main \gls{restart} loop.
The \minisearch{} evaluator will then call a \solver{} to produce a \gls{sol}, and evaluate the \gls{neighbourhood} predicate, incrementally producing new \flatzinc{} to be added to the next round of solving.
While this is a viable approach, our goal is to keep \gls{rewriting} and solving separate, by embedding the entire \gls{meta-optimisation} algorithms into the \gls{slv-mod}.
While this is a viable approach, our goal is to keep \gls{rewriting} and solving separate, by embedding the entire \gls{meta-optimization} algorithms into the \gls{slv-mod}.
This section introduces such a \gls{rewriting} approach.
It only requires simple modifications of the \gls{rewriting} process, and the produced \gls{slv-mod} can be executed by standard \gls{cp} \solvers{} with a small set of simple extensions.
@@ -290,7 +290,7 @@ This essentially \textbf{turns the new functions into \gls{native} \constraints{
The \gls{neighbourhood} predicate can then be added as a \constraint{} to the \gls{slv-mod}.
The evaluation is performed by hijacking the solver's own capabilities: It will automatically perform the evaluation of the new functions by propagating the \gls{native} \constraints{}.
To compile a \gls{meta-optimisation} algorithms to a \gls{slv-mod}, the \gls{rewriting} process performs the following four simple steps.
To compile a \gls{meta-optimization} algorithm to a \gls{slv-mod}, the \gls{rewriting} process performs the following four simple steps.
\begin{enumerate}
\item It replaces the annotation \mzninline{::on_restart("X")} with a call to predicate \mzninline{X}.
@@ -363,7 +363,7 @@ Note that the \gls{slv-mod} will actually include the \mzninline{complete_reif}
\subsection{Solver Support for Restart Based Native Constraints}
We will now show the minimal extensions required from a \solver{} to interpret the new \gls{native} \constraints{} and, consequently, to execute \gls{meta-optimisation} definitions expressed in \minizinc{}.
We will now show the minimal extensions required from a \solver{} to interpret the new \gls{native} \constraints{} and, consequently, to execute \gls{meta-optimization} definitions expressed in \minizinc{}.
First, the \solver{} needs to parse and support the \gls{restart} \glspl{annotation} of \cref{lst:inc-restart-ann}.
Many \solvers{} already support all this functionality.
@@ -390,7 +390,7 @@ Typically, this means the solver must not propagate these \constraints{} until i
Modifying a solver to support this functionality is straightforward if it already has a mechanism for posting \constraints{} during restarts.
We have implemented these extensions for both \gls{gecode} (214 new lines of code) and \gls{chuffed} (173 new lines of code).
For example, consider the model from \cref{lst:inc-basic-complete} again that uses the following \gls{meta-optimisation} algorithms.
For example, consider the model from \cref{lst:inc-basic-complete} again that uses the following \gls{meta-optimization} algorithm.
\begin{mzn}
basic_lns(uniform_neighbourhood(x, 0.2))
@@ -438,7 +438,7 @@ The \mzninline{sol} \constraints{} will simply not propagate anything in case no
\section{An Incremental Constraint Modelling Interface}%
\label{sec:inc-incremental-compilation}
A universal approach to the incremental usage of \cmls{}, and \gls{meta-optimisation}, is to allow incremental changes to an \instance{} of a \cmodel{}.
A universal approach to the incremental usage of \cmls{}, and \gls{meta-optimization}, is to allow incremental changes to an \instance{} of a \cmodel{}.
To solve these changing \instances{}, they have to be rewritten repeatedly to \glspl{slv-mod}.
In this section we extend our architecture with an incremental constraint modelling interface that allows the modeller to change an \instance{}.
For changes made using this interface, the architecture can employ \gls{incremental-rewriting} to minimise the required \gls{rewriting}.
@@ -553,7 +553,7 @@ We therefore define the following three interfaces, using which we can apply the
In this section we present two experiments to test the efficiency and potency of the incremental methods introduced in this chapter.
In our first experiment, we consider the effectiveness \gls{meta-optimisation} within during solving.
In our first experiment, we consider the effectiveness of \gls{meta-optimization} during solving.
In particular, we investigate a round-robin \gls{lns} implemented using \gls{rbmo}.
On three different \minizinc{} models, we compare this approach with solving the \instances{} directly using two different \solvers{}.
For one of the \solvers{}, we also compare with an oracle approach that can directly apply the exact same \gls{neighbourhood} as our \gls{rbmo}, without the need for computation.
@@ -570,15 +570,15 @@ A description of the used computational environment, \minizinc{} instances, and
\subsection{Meta-Optimisation Solving}
We will now show that a solver that evaluates the rewritten \gls{meta-optimisation} specifications can (a) be effective and (b) incur only a small overhead compared to a dedicated implementation of the \glspl{neighbourhood}.
We will now show that a solver that evaluates the rewritten \gls{meta-optimization} specifications can (a) be effective and (b) incur only a small overhead compared to a dedicated implementation of the \glspl{neighbourhood}.
To measure the overhead, we implemented \gls{rbmo} in \gls{gecode} \autocite{gecode-2021-gecode}.
The resulting solver has been instrumented to also output the \domains{} of all \variables{} after propagating the new \gls{native} \constraints{}.
We implemented another extension to \gls{gecode} that simply reads the stream of \variable{} \domains{} for each \gls{restart}, essentially ``replaying'' the \gls{meta-optimisation} without incurring any overhead for evaluating the \glspl{neighbourhood} or handling the additional \variables{} and \constraints{}.
Note that this is a conservative estimate of the overhead: this extension has to perform less work than any real \gls{meta-optimisation} implementation.
We implemented another extension to \gls{gecode} that simply reads the stream of \variable{} \domains{} for each \gls{restart}, essentially ``replaying'' the \gls{meta-optimization} without incurring any overhead for evaluating the \glspl{neighbourhood} or handling the additional \variables{} and \constraints{}.
Note that this is a conservative estimate of the overhead: this extension has to perform less work than any real \gls{meta-optimization} implementation.
In addition, we present benchmark results for the standard release of \gls{gecode}, the development version of \gls{chuffed}, and \gls{chuffed} performing \gls{rbmo}.
These experiments illustrate that the \gls{meta-optimisation} implementations indeed perform well compared to the standard \solvers{}.
These experiments illustrate that the \gls{meta-optimization} implementations indeed perform well compared to the standard \solvers{}.
We ran experiments for three models from the MiniZinc Challenge \autocite{stuckey-2010-challenge, stuckey-2014-challenge} (\texttt{gbac}, \texttt{steelmillslab}, and \texttt{rcpsp-wet}).
The \instances{} are rewritten using the \minizinc{} 2.5.5 \compiler{}, adjusted to rewrite \gls{rbmo} specifications.
@@ -694,7 +694,7 @@ However, \gls{lns} makes \gls{chuffed} much more robust.
\subsection{Incremental Rewriting and Solving}
To demonstrate the advantage that dedicated incremental methods for \minizinc{} offer, we present a runtime evaluation of two \gls{meta-optimisation} algorithms implemented using the two methods presented in this chapter.
To demonstrate the advantage that dedicated incremental methods for \minizinc{} offer, we present a runtime evaluation of two \gls{meta-optimization} algorithms implemented using the two methods presented in this chapter.
We consider the use of lexicographic search and round-robin \gls{lns}, each evaluated on a different \minizinc{} model.
We compare our methods against a naive system that repeatedly programmatically changes an \instance{}, rewrites the full \instance{}, and starts a new \solver{} instance.
The solving in our tests is performed by the \gls{gecode} \gls{solver}.
@@ -750,7 +750,7 @@ The advantage in solve time using \gls{rbmo} is more pronounced here, but it is
The results of our experiments show that there is a clear benefit from the use of incremental methods in \cmls{}.
The \gls{meta-optimisation} algorithms that can be applied using \gls{rbmo} show a significant improvement over the \solvers{} normal search.
The \gls{meta-optimization} algorithms that can be applied using \gls{rbmo} show a significant improvement over the \solvers{}' normal search.
This method is shown to be very efficient and does not underperform even when compared to an unrealistic version of the method that does not require any computation.
The incremental interface offers a great alternative when \solvers{} are not extended for \gls{rbmo}.

View File

@@ -1,13 +1,13 @@
\noindent{}In previous chapters we explored \gls{rewriting} as a definitive linear process, but to solve real-world problems \gls{meta-optimisation} algorithms are often used.
\noindent{}In previous chapters we explored \gls{rewriting} as a definitive linear process, but to solve real-world problems \gls{meta-optimization} algorithms are often used.
These algorithms usually require solving similar problems repeatedly, with only slight modifications, thousands of times.
Examples of these methods are:
\begin{itemize}
\item Multi-objective search \autocite{jones-2002-multi-objective}.
Optimising multiple objectives is often not supported directly in solvers.
Instead, it can be solved using a \gls{meta-optimisation} approach: find a solution to a (single-objective) problem, then add more \constraints{} to the original problem and repeat.
Instead, it can be solved using a \gls{meta-optimization} approach: find a solution to a (single-objective) problem, then add more \constraints{} to the original problem and repeat.
\item \gls{lns} \autocite{shaw-1998-local-search}.
This is a very successful \gls{meta-optimisation} algorithm to quickly improve solution quality.
This is a very successful \gls{meta-optimization} algorithm to quickly improve solution quality.
After finding a (sub-optimal) solution to a problem, \constraints{} are added to restrict the search in the \gls{neighbourhood} of that \gls{sol}.
When a new \gls{sol} is found, the \constraints{} are removed, and \constraints{} for a new \gls{neighbourhood} are added.
\item Online Optimisation \autocite{jaillet-2021-online}.
@@ -32,7 +32,7 @@ In this chapter we introduce two methods to provide this support:
\begin{itemize}
\item We introduce the notion of \gls{rbmo} algorithms.
Using a minimal extension to a \cml{} and its target \solver{}, we can model \gls{meta-optimisation} methods and rewrite them into efficient \glspl{slv-mod}.
Using a minimal extension to a \cml{} and its target \solver{}, we can model \gls{meta-optimization} methods and rewrite them into efficient \glspl{slv-mod}.
The \solver{} will then incrementally execute the methods through the use of \solver{} \glspl{restart}.
This method avoids the need for repeated \gls{rewriting} of the \instance{} altogether.
\item Alternatively, we extend our architecture with an incremental interface for adding and removing \constraints{}.
@@ -42,7 +42,7 @@ In this chapter we introduce two methods to provide this support:
The rest of the chapter is organised as follows.
\Cref{sec:inc-modelling} discusses the declarative modelling of \gls{rbmo} methods in a \cml{}.
\Cref{sec:inc-solver-extension} introduces the method to rewrite these \gls{meta-optimisation} definitions into efficient \glspl{slv-mod} and the minimal extension required from the target \gls{solver}.
\Cref{sec:inc-solver-extension} introduces the method to rewrite these \gls{meta-optimization} definitions into efficient \glspl{slv-mod} and the minimal extension required from the target \gls{solver}.
\Cref{sec:inc-incremental-compilation} introduces the alternative method that extends our architecture with an incremental \constraint{} modelling interface.
\Cref{sec:inc-experiments} reports on the experimental results of both approaches.

View File

@@ -8,8 +8,8 @@ They allow the modeller to reason at a high level when specifying the problem.
Although the importance of creating \glspl{slv-mod} that can be solved efficiently is well-known, changes in the usage of these languages are raising questions about the time required to perform this transformation.
First, \constraint{} models are now often solved using low-level \solvers{}, such as \gls{mip} and \gls{sat}.
\Glspl{slv-mod} for these \solvers{} are more sizeable and the process to translate a high-level \cmodel{} into the low-level format required by these \solvers{} is complex.
Second, we have seen a development of the use of \gls{meta-optimisation}.
\Gls{meta-optimisation} is often implemented by incrementally adjusting \instances{}, after which those \instances{} have to be rewritten to be solved.
Second, we have seen an increase in the use of \gls{meta-optimization}.
\Gls{meta-optimization} is often implemented by incrementally adjusting \instances{}, after which those \instances{} have to be rewritten to be solved.
Both of these developments put additional pressure on the \gls{rewriting} process, which can often be a bottleneck in the overall process.
In this thesis, we have explored in detail and presented improvements to the process of \gls{rewriting} \instances{} of \cmodels{} into \glspl{slv-mod}.
@@ -102,12 +102,12 @@ In this form the propagator would set the \gls{cvar} to \mzninline{true} if the
The downside of this form is that it is no longer equivalent to a logical implication (and, for example, \gls{chain-compression} would no longer be applicable), but \glspl{propagator} for this form are still easy to design and implement, and they ensure that the \gls{cvar} is fixed through \gls{propagation}.
Finally, the usage of automated \gls{half-reif} in \minizinc{} will allow new solving performance improvements by allowing \solver{} implementers to experiment with \glspl{decomp} or \glspl{propagator} for \gls{half-reified} \constraints{}.
\paragraph{Incremental Solving} Using a \cml{} as the interface for a \gls{meta-optimisation} toolchain can be very intuitive and open up new opportunities.
\paragraph{Incremental Solving} Using a \cml{} as the interface for a \gls{meta-optimization} toolchain can be very intuitive and open up new opportunities.
The modeller can describe the process as a changing \cmodel{}.
The overhead of the repeated \gls{rewriting} process to arrive at a \gls{slv-mod} can, however, be a hindrance.
\Cref{ch:incremental} presents our final contribution: \textbf{two methods to practically eliminate the overhead of using \cmls{} in \gls{meta-optimisation}}.
\Cref{ch:incremental} presents our final contribution: \textbf{two methods to practically eliminate the overhead of using \cmls{} in \gls{meta-optimization}}.
Our primary method, \gls{rbmo}, allows modellers to describe \gls{meta-optimisation} algorithms, the iterative changes to their model, as part of their \minizinc{} model.
Our primary method, \gls{rbmo}, allows modellers to describe \gls{meta-optimization} algorithms, the iterative changes to their model, as part of their \minizinc{} model.
These descriptions are rewritten into a small set of new \gls{native} \constraints{} that the \solver{} has to support.
Although this method requires \solvers{} to be slightly extended, it eliminates the need for repeated \gls{rewriting}.
Only a single \gls{slv-mod} is created.
@@ -115,14 +115,14 @@ The changes to the \gls{slv-mod} are iteratively applied within the \solver{}.
In our experiments, we have shown that this method is highly effective.
Even compared to an ``oracle'' approach, where the changes are merely read and not computed, this approach is only slightly slower.
Meanwhile, the time required to rewrite the \gls{meta-optimisation} descriptions is negligible.
Meanwhile, the time required to rewrite the \gls{meta-optimization} descriptions is negligible.
It is not always possible to extend a \solver{}, so for these cases we have defined a second method.
This method significantly reduces the overhead of \gls{rewriting} when incrementally changing \instances{}.
We have defined an incremental interface for \cmls{}.
A modeller can repeatedly add \constraints{} and \variables{} to an \instance{} and, crucially, the additions to the \instance{} can be retracted in reverse order through the use of a \gls{trail}.
Each of these changes to the \instance{} is incrementally applied to the \gls{slv-mod}.
Since multiple iterations of \gls{meta-optimisation} typically share large parts of their \instance{}, this significantly reduces the amount of work required in the \gls{rewriting} process.
Since multiple iterations of \gls{meta-optimization} typically share large parts of their \instance{}, this significantly reduces the amount of work required in the \gls{rewriting} process.
As an additional improvement, the changes observed in the \gls{slv-mod} can be incrementally applied within the \solver{}.
Ideally, the \solver{} can fully support the incremental changes made to the \gls{slv-mod}.
@@ -134,14 +134,14 @@ Our experiments show that this method is not as effective as \gls{rbmo}.
It is, however, still a significant improvement over repeatedly \gls{rewriting} the full \instance{} that can be employed even in cases where \gls{rbmo} cannot be used.
The improvements offered by these new methods may spark future research.
Primarily, they will allow and promote the usage of \gls{meta-optimisation} methods in \cmls{} for new problems.
Primarily, they will allow and promote the usage of \gls{meta-optimization} methods in \cmls{} for new problems.
It could even be worthwhile to revisit existing applications of incremental constraint modelling.
The utilisation of the presented methods might offer a significant improvement in performance, allowing the solving of more complex problems.
Finally, new \gls{meta-optimisation} techniques could require extensions of the methods presented.
Finally, new \gls{meta-optimization} techniques could require extensions of the methods presented.
\paragraph{Summary} In conclusion, this thesis presented an architecture for the \gls{rewriting} of \cmls{}.
This architecture has been designed with modern usages of these languages in mind, such as \gls{rewriting} for low-level \solvers{} and \gls{meta-optimisation}.
This architecture has been designed with modern usages of these languages in mind, such as \gls{rewriting} for low-level \solvers{} and \gls{meta-optimization}.
It is efficient and incremental, but easily extensible with many simplification techniques.
It also includes a formal framework to reason about \gls{reif} that is able to introduce \glspl{half-reif}.
Finally, we also presented a novel method to specify a \gls{meta-optimisation} method to be executed within a \solver{}.
Finally, we also presented a novel method to specify a \gls{meta-optimization} method to be executed within a \solver{}.
Together, these contributions make \cmls{} a more powerful tool to help solve complex problems.

View File

@@ -33,7 +33,7 @@ This repository contains the flattening, recursive function, and the incremental
This repository contains both the benchmark for the \gls{chuffed} propagators of \gls{half-reif} \constraints{} and the \minizinc{} Challenge 2019 \& 2020 benchmark.
\paragraph{Restarting Meta-Search} Finally, the design of the experiments for restart based \gls{meta-optimisation} can be found in:
\paragraph{Restarting Meta-Search} Finally, the design of the experiments for restart based \gls{meta-optimization} can be found in:
\begin{center}
\url{https://github.com/Dekker1/on-restart-benchmarks}