# GradientDescentStronglyConvex

## Main results
This file concentrates on the gradient descent algorithm for smooth, strongly convex optimization problems.
We prove the O(ρ^k) (linear) convergence rate for this algorithm.
theorem
Strong_convex_Lipschitz_smooth
{E : Type u_1}
[NormedAddCommGroup E]
[InnerProductSpace ℝ E]
[CompleteSpace E]
{f : E → ℝ}
{m : ℝ}
{f' : E → E}
{x : E}
{y : E}
{l : NNReal}
(hsc : StrongConvexOn Set.univ m f)
(mp : m > 0)
(hf : ∀ (x : E), HasGradientAt f (f' x) x)
(h₂ : LipschitzWith l f')
(hl : ↑l > 0)
:
theorem
lipschitz_derivxm_eq_zero
{E : Type u_1}
[NormedAddCommGroup E]
[InnerProductSpace ℝ E]
[CompleteSpace E]
{f : E → ℝ}
{f' : E → E}
{xm : E}
{l : NNReal}
(h₁ : ∀ (x : E), HasGradientAt f (f' x) x)
(h₂ : LipschitzWith l f')
(min : IsMinOn f Set.univ xm)
(hl : ↑l > 0)
:
f' xm = 0
theorem
gradient_method_strong_convex
{E : Type u_1}
[NormedAddCommGroup E]
[InnerProductSpace ℝ E]
[CompleteSpace E]
{f : E → ℝ}
{m : ℝ}
{f' : E → E}
{xm : E}
{x₀ : E}
(hsc : StrongConvexOn Set.univ m f)
{alg : Gradient_Descent_fix_stepsize f f' x₀}
(hm : m > 0)
(min : IsMinOn f Set.univ xm)
(step₂ : Gradient_Descent_fix_stepsize.a f f' x₀ ≤ 2 / (m + ↑(Gradient_Descent_fix_stepsize.l f f' x₀)))
(k : ℕ)
: