@inproceedings{rolet:inria-00437140,
  hal_id      = {inria-00437140},
  url         = {http://hal.inria.fr/inria-00437140},
  title       = {Bandit-Based Estimation of Distribution Algorithms for Noisy Optimization: Rigorous Runtime Analysis},
  author      = {Rolet, Philippe and Teytaud, Olivier},
  abstract    = {We show complexity bounds for noisy optimization, in frameworks in which noise is stronger than in previously published papers [19]. We also propose an algorithm based on bandits (variants of [16]) that reaches the bound within logarithmic factors. We emphasize the differences with empirically derived published algorithms.},
  keywords    = {noisy optimization, evolutionary algorithms, bandits},
  language    = {English},
  affiliation = {Laboratoire de Recherche en Informatique - LRI, TAO - INRIA Futurs, TAO - INRIA Saclay - Ile de France},
  booktitle   = {Learning and Intelligent Optimization ({LION} 4)},
  address     = {Venice, Italy},
  audience    = {internationale},
  year        = {2010},
  pdf         = {http://hal.inria.fr/inria-00437140/PDF/lion4long.pdf},
}
@inproceedings{coulom:hal-00517157,
  hal_id      = {hal-00517157},
  url         = {http://hal.archives-ouvertes.fr/hal-00517157},
  title       = {Handling Expensive Optimization with Large Noise},
  author      = {Coulom, R{\'e}mi and Rolet, Philippe and Sokolovska, Nataliya and Teytaud, Olivier},
  abstract    = {This paper exhibits lower and upper bounds on runtimes for expensive noisy optimization problems. Runtimes are expressed in terms of number of fitness evaluations. Fitnesses considered are monotonic transformations of the {\em sphere} function. The analysis focuses on the common case of fitness functions quadratic in the distance to the optimum in the neighborhood of this optimum---it is nonetheless also valid for any monotonic polynomial of degree p>2. Upper bounds are derived via a bandit-based estimation of distribution algorithm that relies on Bernstein races called R-EDA. It is known that the algorithm is consistent even in non-differentiable cases. Here we show that: (i) if the variance of the noise decreases to 0 around the optimum, it can perform optimally for quadratic transformations of the norm to the optimum, (ii) otherwise, it provides a slower convergence rate than the one exhibited empirically by an algorithm called Quadratic Logistic Regression based on surrogate models---although QLR requires a probabilistic prior on the fitness class.},
  keywords    = {Noisy optimization, Bernstein races},
  language    = {English},
  affiliation = {SEQUEL - INRIA Lille - Nord Europe, TAO - INRIA Saclay - Ile de France, Laboratoire de Recherche en Informatique - LRI},
  booktitle   = {Foundations of Genetic Algorithms ({FOGA} 2011)},
  publisher   = {ACM},
  address     = {Austria},
  audience    = {internationale},
  year        = {2011},
  month       = jan,
  internal-note = {page numbers were not yet assigned at export time (original export had pages = TBA); add when published},
  pdf         = {http://hal.archives-ouvertes.fr/hal-00517157/PDF/foga10noise.pdf},
}
2. Outline
Introduction
Lower bound
An intuitive solution: bandits
Rigorous upper-bound and rigorous solution
Rolet and Teytaud TRSH 09 is great 2
3. Introduction
Noisy optimization:
- the fitness function returns a noisy answer;
- this noisy answer is independent;
- goal: finding best expected value.
Rolet and Teytaud TRSH 09 is great 3
4. Main argument of this work
Usual algorithms don't work in noisy optimization.
The computational
power is like this ==>
<== and the result is like that
because algos are not consistent.
==> there is much to win, algorithms can be
greatly improved.
Rolet and Teytaud TRSH 09 is great 4
5. Previous works
Should we average or not ?
Or should we increase lambda ?
==> various answers in the literature.
Rolet and Teytaud TRSH 09 is great 5
6. Jebalia, Auger, PPSN 2008
.
.
==> scale-invariant 1+1-ES
converges linearly
Rolet and Teytaud TRSH 09 is great 6
7. Jebalia, Auger, PPSN 2008
.
.
In the present work:
- no lower-bound on fnoise(x)/f(x)
- "real" algorithm (not scale-invariant)
==> but slower rate (yet tight)
Rolet and Teytaud TRSH 09 is great 7
8. Introduction
Introduction
Lower bound
An intuitive solution: bandits
Rigorous upper-bound and rigorous solution
Rolet and Teytaud TRSH 09 is great 8
9. Lower bound: boring framework
Optimization
algorithm
Rolet and Teytaud TRSH 09 is great 9
10. Lower bound: boring framework
Noisy (binary) measurement
It's a lower bound. If it holds in the binary
case, it holds in the non-binary case as well.
Rolet and Teytaud TRSH 09 is great 10
11. Lower bound: boring framework
Final loss
Rolet and Teytaud TRSH 09 is great 11
12. How to prove a lower bound ? Simple case...
Consider we are in dimension 2. Consider that you
solve the problem with precision λ.
Consider a regular simplex of possible optima:
d(ti,tj) = λ
(here, for simplicity: deterministic algorithm)
Rolet and Teytaud TRSH 09 is great 12
13. How to prove a lower bound ? Simple case...
Visit point x
f(x,ti) = f(x,tj) ± λ
so with proba 1-λ,
f_noise(x,ti) = f_noise(x,tj)
==> with proba 1-λ,
an iteration with optimum in ti
= an iteration with optimum in tj
Rolet and Teytaud TRSH 09 is great 13
14. How to prove a lower bound ? Simple case...
With proba 1-λ,
an iteration with optimum in ti
= an iteration with optimum in tj
With proba 1-Nλ,
a run with optimum in ti
= a run with optimum in tj
==> real case = similar to this one.
Rolet and Teytaud TRSH 09 is great 14
16. Introduction
Introduction
Lower bound
An intuitive solution: bandits
Rigorous upper-bound and rigorous solution
Rolet and Teytaud TRSH 09 is great 16
17. Idea of bandits
I have N arms. I have t time steps.
Pulling an arm yields a reward in [0,1].
Each arm has a stationary (i.i.d) reward.
At each time step I can pull an arm.
How will I find good arms ?
Rolet and Teytaud TRSH 09 is great 17
18. Idea of bandits
The goal of Bernstein races:
- guessing which arms are the most rewarding;
- whilst saving up time.
Rolet and Teytaud TRSH 09 is great 18
19. Bernstein race: general idea.
While (I want to go on)
{
I pull once each non discarded arm.
I compute a lower and upper bound for all
non-discarded arms (Bernstein bound).
I discard arms which are excluded by the
bounds.
}
Rolet and Teytaud TRSH 09 is great 19
20. Already used for noisy optimization
Idea:
- evolutionary algorithm (CMA)
- replacing selection by Bernstein race:
keep racing until μ points are selected.
Trouble:
- sometimes very expensive iterations
- tricks are added in the algorithm, not stable
Rolet and Teytaud TRSH 09 is great 20
21. Our version
Idea:
- evolutionary algorithm;
- derandomized mutation ensuring that there are
at least two points with "sufficiently different"
values.
- replacing selection by Bernstein race:
keep racing until μ points are significantly
better than μ' other points. (μ=1, μ'=1)
Tricky part: derandomized mutation (one more step in
the algorithm; less simple);
Otherwise no trouble, proved, we don't have
to add tricks.
Rolet and Teytaud TRSH 09 is great 21
22. Introduction
Introduction
Lower bound
An intuitive solution: bandits
Rigorous upper-bound and rigorous
solution
Rolet and Teytaud TRSH 09 is great 22
23. Bernstein race for comparing two among three
arms with confidence 1-κ'
Rolet and Teytaud TRSH 09 is great 23
24. Bernstein race for comparing two among three
arms with confidence 1-κ'
Bernstein principle
(low variance)
Rolet and Teytaud TRSH 09 is great 24
25. Bernstein race for comparing 2 among 3 arms with
confidence 1-κ'
THE important part:
points are
signif. different!
Rolet and Teytaud TRSH 09 is great 25
26. The complete algorithm
The algorithm iteratively improves the set of
possible optima. Iteration n as follows:
1) Generate 3 points (equally spaced on a line).
2) Apply the Bernstein race until one of the arms (the
good arm) is statistically better than at least one
other arm (the bad arm) for some κ' = O(1/n²).
3) Remove the part of the domain which is farther
from the good arm than from the bad arm.
Sum of the κ' = κ ==> proof with confidence 1-κ.
Rolet and Teytaud TRSH 09 is great 26
28. Conclusion
Bandits = good tool for noisy optimization.
But taking care of how to generate points is necessary
(otherwise one might have points with very similar
fitness values which is dangerous!).
Further work:
* Generalizing the approach to simpler algorithms.
* Lower bound = upper bound up to constant factors
depending on the dimension
* Proof of slower rate with variance not decreasing
to zero at the optimum.
Rolet and Teytaud TRSH 09 is great 28
Editor's note (the note below appears to be leftover boilerplate from an unrelated presentation):
I am Frederic Lemoine, PhD student at the University Paris Sud. I will present you my work on GenoQuery, a new querying module adapted to a functional genomics warehouse