Optimization 2015 lecture added
Marc Toussaint committed Dec 3, 2019
1 parent 85ca577 commit 17d5cd6
Showing 54 changed files with 9,554 additions and 6 deletions.
365 changes: 365 additions & 0 deletions Optimization/01-introduction.tex
@@ -0,0 +1,365 @@
\input{../shared/shared}

\renewcommand{\course}{Optimization}
\renewcommand{\coursepicture}{optim}
\renewcommand{\coursedate}{Summer 2015}

\renewcommand{\topic}{Introduction}
\renewcommand{\keywords}{}

\slides

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Why Optimization is interesting!}{

\item {\small
In an otherwise unfortunate interview I was once asked why ``we guys''
(AI, ML, optimal control people) always talk about
optimality. ``People are by no means optimal'', the interviewer
said. I think that statement pinpoints a common misunderstanding of
the role and concept of optimality principles.}

\begin{items}
\item \emph{Optimality principles are a means of scientific (or engineering)
description.}
\item It is often easier to describe a thing (natural or artificial)
via an optimality principle than directly
\end{items}

~

\item Which science does \emph{not} use optimality principles to
describe nature \& artifacts?
\begin{items}
\item Physics, Chemistry, Biology, Mechanics, ...
\item Operations research, scheduling, ...
\item Computer Vision, Speech Recognition, Machine Learning, Robotics, ...
\end{items}

~

\item Endless applications

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Teaching optimization}{

\item Standard: \emph{Convex Optimization, Numerical Optimization}

\item Discrete Optimization~ (Stefan Funke)

\item Exotics: Evolutionary Algorithms, Swarm optimization, etc.

~\mypause

\item In this lecture I try to cover the standard topics, but also
include work on stochastic search \& global optimization

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\key{Types of optimization problems}
\slide{Rough Types of Optimization Problems}{

\item Generic optimization problem:

Let $x\in\RRR^n,~ f:~ \RRR^n \to \RRR,~ g:~ \RRR^n \to \RRR^m,~
h: \RRR^n\to\RRR^l$. Find
\begin{align*}
\min_x\quad & f(x)\\
\st & g(x)\le 0\comma h(x)=0
\end{align*}

\item \textbf{Blackbox}: only $f(x)$ can be evaluated

\item \textbf{Gradient}: $\na f(x)$ can be evaluated

{\tiny
\item Gauss-Newton type: $f(x) = \phi(x)^\T \phi(x)$ and
$\na \phi(x)$ can be evaluated
}

\item \textbf{2nd order}: $\he f(x)$ can be evaluated

~

\item ``Approximate upgrade'' (see the sketch after this slide):

-- Use samples of $f(x)$ to approximate $\na f(x)$ locally

-- Use samples of $\na f(x)$ to approximate $\he f(x)$ locally

}
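
{\small As an illustration of the ``approximate upgrade'' from blackbox
to gradient access, here is a minimal Python sketch (an illustration
only, not the course's C++ software) that approximates $\na f(x)$ from
samples of $f$ via central differences:}

\begin{verbatim}
import numpy as np

def finite_difference_gradient(f, x, eps=1e-6):
    """Approximate the gradient of f at x from function samples only."""
    n = x.size
    grad = np.zeros(n)
    for i in range(n):
        e = np.zeros(n)
        e[i] = eps
        # central difference: truncation error is O(eps^2)
        grad[i] = (f(x + e) - f(x - e)) / (2 * eps)
    return grad

# check on a quadratic f(x) = x^T A x (A symmetric), whose gradient is 2 A x
rng = np.random.default_rng(0)
A = rng.standard_normal((3, 3)); A = A + A.T
f = lambda x: x @ A @ x
x = rng.standard_normal(3)
print(np.allclose(finite_difference_gradient(f, x), 2 * A @ x, atol=1e-4))
\end{verbatim}

{\small The same construction applied to samples of $\na f(x)$ yields a
finite-difference approximation of $\he f(x)$.}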

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Optimization in Machine Learning: SVMs}{

\item optimization problem

$\max_{\b, \norm{\b}=1} M$ \hspace{0.5cm} subject to $y_i (\phi(x_i)^\T
\b) \ge M, \quad i=1,\dots,n$

\item can be rephrased as

$\min_{\b}\norm{\b}$ \hspace{0.5cm} subject to $y_i (\phi(x_i)^\T \b) \ge
1, \quad i=1,\dots,n$

\emph{Ridge regularization} as in ridge regression, but with a
different loss (a numerical sketch follows this slide)

{
\showh[.35]{svm_trenngeraden}
\hspace{1cm}
\showh[.45]{svm_margin}
}

}
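
{\small The rephrased problem is a small quadratic program. Below is a
hedged Python sketch that solves it with
\texttt{scipy.optimize.minimize} (SLSQP); the toy dataset and the
convention of appending a bias feature to $\phi(x)$ are assumptions
made purely for illustration:}

\begin{verbatim}
import numpy as np
from scipy.optimize import minimize

# toy linearly separable data; phi(x) = (1, x) appends a bias feature
X = np.array([[1., 2.], [2., 3.], [3., 3.],
              [-1., -1.], [-2., -1.], [-3., -2.]])
y = np.array([1., 1., 1., -1., -1., -1.])
Phi = np.hstack([np.ones((len(X), 1)), X])

# min ||beta||^2   s.t.   y_i * phi(x_i)^T beta >= 1
res = minimize(
    fun=lambda b: b @ b,
    x0=np.zeros(Phi.shape[1]),
    jac=lambda b: 2 * b,
    constraints={"type": "ineq", "fun": lambda b: y * (Phi @ b) - 1},
    method="SLSQP")
beta = res.x
# under this formulation the achieved margin is 1/||beta||
print("margin =", 1 / np.linalg.norm(beta))
\end{verbatim}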

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Optimization in Robotics}{

\item Trajectories~ (a least-squares sketch follows this slide):

Let $x_t \in\RRR^n$ be a joint configuration and $x = x_{1:T} =
(x_1,\ldots,x_T)$ a trajectory of length $T$. Find
\begin{align} \label{eqOpt}
\begin{split}
\min_{x}\quad
& \sum_{t=0}^{T} f_t(x_{t-k:t})^\T f_t(x_{t-k:t})
%~+~ \sum_{t,t'} k(t,t') x_t^\T x_{t'}
\\
\st
& \forall_t:~ g_t(x_t) \le 0\comma h_t(x_t) = 0
\end{split}
\end{align}


\item Control:
\begin{align}
\min_{u, \ddot q, \lambda}\quad
& \norm{u-a}^2_H \\
\st
& u = M \ddot q + h + J_g^\T \lambda \\
& J_\phi\ddot q = c \\
& \lambda = \lambda^* \\
& J_g\ddot q = b
\end{align}

}
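
{\small A minimal Python sketch of the trajectory problem above in its
unconstrained form (the constraints $g_t, h_t$ are dropped), using
\texttt{scipy.optimize.least\_squares} on the sum-of-squares cost with
$k=1$ velocity features and a soft goal term; horizon, dimension, start
and goal are made up for illustration:}

\begin{verbatim}
import numpy as np
from scipy.optimize import least_squares

T, n = 20, 2                  # horizon and joint-space dimension
x0 = np.zeros(n)              # fixed start configuration
goal = np.array([1., 2.])     # desired final configuration

def residuals(z):
    x = z.reshape(T, n)               # the trajectory x_{1:T}
    xs = np.vstack([x0, x])           # prepend the start x_0
    r_smooth = np.diff(xs, axis=0)    # f_t = x_t - x_{t-1}  (k = 1)
    r_goal = 10. * (x[-1] - goal)     # soft goal-reaching term at t = T
    return np.concatenate([r_smooth.ravel(), r_goal])

# least_squares minimizes sum_t f_t^T f_t, a Gauss-Newton-type problem
res = least_squares(residuals, np.zeros(T * n))
traj = res.x.reshape(T, n)
print(traj[0], traj[-1])      # interpolates from x0 towards the goal
\end{verbatim}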

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Optimization in Computer Vision}{

~

\item Andres Bruhn's lectures

\item Flow estimation, (relaxed) min-cut problems, segmentation, ...

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% \slide{}{

%% \tiny
%% log-barrier\\
%% simplex\\
%% particle swarm\\
%% MCMC (simulated annealing)\\
%% (L)BFGS\\
%% blackbox stochastic search\\
%% Newton\\
%% Rprop\\
%% EM\\
%% primal/dual\\
%% greedy\\
%% (conj.) gradients\\
%% KKT\\
%% line search \\
%% linear/quadratic programming\\

%% }

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% \slide{}{

%% This is the first time I give the lecture!

%% ~

%% \item It'll be improvised

%% \item You can tell me what to include

%% }

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Planned Outline}{
\small
\item Unconstrained Optimization: Gradient- and 2nd order methods
\begin{items}
\item stepsize \& direction, plain gradient descent, steepest descent, line
search \& trust region methods, conjugate gradient (a line-search
sketch follows this slide)

\item Newton, Gauss-Newton, Quasi-Newton, (L)BFGS
\end{items}


\item Constrained Optimization
\begin{items}
\item log barrier, squared penalties, augmented Lagrangian

\item Lagrangian, KKT conditions, Lagrange dual, log barrier
$\oto$ approx.\ KKT
\end{items}




\item Special convex cases
\begin{items}
\item Linear Programming, (sequential) Quadratic Programming
\item Simplex algorithm
\item Relaxation of integer linear programs
\end{items}

\item Global Optimization
\begin{items}
\item infinite bandits, probabilistic modelling, exploration vs.\
exploitation, GP-UCB
\end{items}

\item Stochastic search
\begin{items}
\item Blackbox optimization (0th order methods), MCMC, downhill simplex
\end{items}


}
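
{\small As a taster for the first block of the outline, a minimal
Python sketch of plain gradient descent with backtracking line search
(Armijo condition); the test function and all parameter values are
made up for illustration:}

\begin{verbatim}
import numpy as np

def gradient_descent(f, grad, x, alpha0=1., rho=0.5, c=1e-4, tol=1e-6):
    """Plain gradient descent with Armijo backtracking line search."""
    g = grad(x)
    while np.linalg.norm(g) > tol:
        alpha = alpha0
        # backtrack until sufficient decrease:
        #   f(x - alpha g) <= f(x) - c alpha ||g||^2
        while f(x - alpha * g) > f(x) - c * alpha * (g @ g):
            alpha *= rho
        x = x - alpha * g
        g = grad(x)
    return x

# example: an ill-conditioned quadratic with its minimum at the origin
A = np.diag([1., 10.])
f = lambda x: 0.5 * x @ A @ x
grad = lambda x: A @ x
print(gradient_descent(f, grad, np.array([1., 1.])))  # ~ [0, 0]
\end{verbatim}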

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% \slide{Planned benchmark problems used in exercises}{

%% \item Analytic functions (scalable to very large $n$, controlled conditioning)

%% -- quadratic: $f(x) = x^\T A x$

%% -- convex: $f(x) = c^\T x - \sum_{i=1}^m \log(b_i + a_i^\T x)$,
%% $c,a_i\sim\NN(0,1)$, $b_i\sim \UU[.3,3]$

%% -- non-convex: I'll think of some mean ones

%% ~

%% \item Robotics path planning

%% -- collision constrained, or ``unconstrained''

%% ~

%% \item Planned: Convolutional neural net to detect faces in images

%% }

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Books}{

~

\twocol{.4}{.5}{
\show{bv_cvxbook_cover.jpg}
}{

Boyd and Vandenberghe: \emph{Convex Optimization.}

\url{http://www.stanford.edu/~boyd/cvxbook/}

}

~

~

\hfill \tiny(this course will not go into the full mathematical depth of Boyd et al.)

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Books}{

~

\twocol{.4}{.5}{
\show{nocedal-wright.png}
}{

Nocedal \& Wright: \emph{Numerical Optimization}

\url{www.bioinfo.org.cn/~wangchao/maa/Numerical_Optimization.pdf}

}

~

~

%\hfill \tiny(this course will not go to the full depth in math of Boyd et al.)

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slide{Organisation}{\label{lastpage}

\item Webpage:

\cen{\tiny\url{http://ipvs.informatik.uni-stuttgart.de/mlr/marc/teaching/15-Optimization/}}
\begin{items}
\item Slides, Exercises \& Software (C++)
\item Links to books and other resources
\end{items}

\item For admin matters, please first ask:

Carola Stahl, {\tiny\tt
Carola.Stahl\@ipvs.uni-stuttgart.de}, Room 2.217

~

\item Rules for the tutorials:

\begin{items}
\item Doing the exercises is crucial!

\item At the beginning of each tutorial:

-- sign your name on a list

-- mark which exercises you have (successfully) worked on

\item Students are randomly selected to present their solutions

\item {\color{red}You need to have completed 50\% of the exercises to be
admitted to the exam}

\item Please check two weeks before the end of the term whether you can
take the exam
\end{items}

}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\slidesfoot