\documentclass[twoside]{article} \usepackage{amssymb, amsmath} % font used for R in Real numbers \pagestyle{myheadings} \markboth{Periodic solutions for evolution equations} {Mihai Bostan} \begin{document} \title{\vspace{-1in}\parbox{\linewidth}{\footnotesize\noindent {\sc Electronic Journal of Differential Equations}, Monograph 03, 2002. \newline ISSN: 1072-6691. URL: http://ejde.math.swt.edu or http://ejde.math.unt.edu \newline ftp ejde.math.swt.edu (login: ftp)} \vspace{\bigskipamount} \\ % Periodic solutions for evolution equations % \thanks{ {\em Mathematics Subject Classifications:} 34B05, 34G10, 34G20. \hfil\break\indent {\em Key words:} maximal monotone operators, evolution equations, Hille-Yosida's theory. \hfil\break\indent \copyright 2002 Southwest Texas State University. \hfil\break\indent Submitted May 14, 2002. Published August 23, 2002.} } \date{} % \author{Mihai Bostan} \maketitle \begin{abstract} We study the existence and uniqueness of periodic solutions for evolution equations. First we analyze the one-dimensional case. Then for arbitrary dimensions (finite or not), we consider linear symmetric operators. We also prove the same results for non-linear sub-differential operators $A = \partial \varphi$ where $\varphi$ is convex. \end{abstract} \newcommand{\projf}{\mathop{\rm Proj}\nolimits_{\overline{R(A)}}\langle f\rangle} \newtheorem{theorem}{Theorem}[section] \newtheorem{prop}[theorem]{Proposition} \newtheorem{remark}[theorem]{Remark} \newtheorem{defi}[theorem]{Definition} \tableofcontents \section{Introduction} Many theoretical and numerical studies in applied mathematics focus on permanent regimes for ordinary or partial differential equations. The main purpose of this paper is to establish existence and uniqueness results for periodic solutions in the general framework of evolution equations, $$\label{EvEq} x'(t) + Ax(t) = f(t), \quad t \in \mathbb{R},$$ by using the penalization method. 
Note that in the linear case a necessary condition for the existence is $$\label{NecCond} \langle f\rangle:=\frac{1}{T}\int_0 ^T f(t)dt \in \mathop{\rm Range}(A).$$ Unfortunately, this condition is not always sufficient for existence; see the example of the orthogonal rotation of $\mathbb{R}^2$. Nevertheless, the condition (\ref{NecCond}) is sufficient in the symmetric case. The key point consists of considering first the perturbed equation \begin{equation*} %\label{} \alpha x_\alpha (t) + x_\alpha '(t) + Ax_\alpha (t) = f(t), \quad t \in \mathbb{R}, \end{equation*} where $\alpha > 0$. By using the Banach's fixed point theorem we deduce the existence and uniqueness of the periodic solutions $x_\alpha, \alpha >0$. Under the assumption (\ref{NecCond}), in the linear symmetric case we show that $(x_\alpha )_{\alpha > 0}$ is a Cauchy sequence in $C^1$. Then by passing to the limit for $\alpha \to 0$ it follows that the limit function is a periodic solution for (\ref{EvEq}). These results have been announced in \cite{CRAS_EqEvPer}. The same approach applies for the study of almost periodic solutions (see \cite{Almost}). Results concerning this topic have been obtained previously by other authors using different methods. A similar condition (\ref{NecCond}) has been investigated in \cite{BreHar} when studying the range of sums of monotone operators. A different method consists of applying fixed point techniques, see for example \cite{Bre,Har}. This article is organized as follows. First we analyze the one dimensional case. Necessary and sufficient conditions for the existence and uniqueness of periodic solutions are shown. Results for sub(super)-periodic solutions are proved as well in this case. In the next section we show that the same existence result holds for linear symmetric maximal monotone operators on Hilbert spaces. In the last section the case of non-linear sub-differential operators is considered. 
\section{Periodic solutions for one dimensional evolution equations} To study the periodic solutions for evolution equations it is convenient to consider first the one dimensional case $$\label{1D} x'(t) + g(x(t)) = f(t), \quad t \in \mathbb{R},$$ where $g:\mathbb{R} \to \mathbb{R}$ is increasing Lipschitz continuous in $x$ and $f:\mathbb{R} \to \mathbb{R}$ is $T$-periodic and continuous in $t$. By Picard's theorem it follows that for each initial data $x(0) = x_0 \in \mathbb{R}$ there is a unique solution $x\in C^1(\mathbb{R};\mathbb{R})$ for (\ref{1D}). We are looking for $T$-periodic solutions. Let us start with the uniqueness study. \subsection{Uniqueness} \begin{prop} \label{1D_Uniq} Assume that $g$ is strictly increasing and $f$ is periodic. Then there is at most one periodic solution for (\ref{1D}). \end{prop} \paragraph{Proof} Let $x_1, x_2$ be two periodic solutions for (\ref{1D}). By taking the difference between the two equations and multiplying by $x_1(t) - x_2(t)$ we get $$\label{Decreas} \frac{1}{2}\frac{d}{dt}|x_1(t) - x_2(t)|^2 + [g(x_1(t))-g(x_2(t))][x_1(t) -x_2(t)] = 0, \quad t \in \mathbb{R}.$$ Since $g$ is increasing we have $(g(x_1)-g(x_2))(x_1-x_2) \geq 0$ for all $x_1,x_2\in \mathbb{R}$ and therefore we deduce that $|x_1(t) - x_2(t)|$ is decreasing. Moreover as $x_1$ and $x_2$ are periodic it follows that $|x_1(t) - x_2(t)|$ does not depend on $t\in \mathbb{R}$ and therefore, from (\ref{Decreas}) we get $$[g(x_1(t)) - g(x_2(t))][x_1(t) - x_2(t)] = 0, \quad t \in \mathbb{R}.$$ Finally, the strict monotonicity of $g$ implies that $x_1 = x_2$. \begin{remark} \label{MultiSol} \rm If $g$ is only increasing, it is possible that (\ref{1D}) has several periodic solutions. Let us consider the function $$g(x) = \left \{ \begin{array}{ll} x+ \varepsilon & x < -\varepsilon,\\ 0 & x \in [-\varepsilon, \varepsilon],\\ x - \varepsilon & x > \varepsilon, \end{array} \right.$$ and $f(t) = \frac{\varepsilon}{2}\cos t$. 
We can easily check that $x_\lambda (t) = \lambda + \frac{\varepsilon}{2}\sin t$ are periodic solutions for (\ref{1D}) for $\lambda \in [-\frac{\varepsilon}{2}, \frac{\varepsilon}{2}]$. \end{remark} Generally we can prove that every two periodic solutions differ by a constant. \begin{prop} Let $g$ be an increasing function and $x_1, x_2$ two periodic solutions of (\ref{1D}). Then there is a constant $C \in \mathbb{R}$ such that $$x_1(t) - x_2(t) = C, \quad \forall t \in \mathbb{R}.$$ \end{prop} \paragraph{Proof} As shown before there is a constant $C \in \mathbb{R}$ such that $|x_1(t) - x_2(t) | = C$, $t \in \mathbb{R}$. Moreover $x_1(t) - x_2(t)$ has constant sign, otherwise $x_1(t_0) = x_2(t_0)$ for some $t_0 \in \mathbb{R}$ and it follows that $|x_1(t) - x_2(t)| = |x_1(t_0) - x_2(t_0)| = 0$, $t \in \mathbb{R}$ or $x_1 = x_2$. Finally we find that $$x_1(t) - x_2(t) = \mathop{\rm sign}(x_1(0) - x_2(0)) C, \quad t \in \mathbb{R}.$$ Before analyzing in detail the uniqueness for increasing functions, let us define the following sets. $$\mathcal{O}(y) = \left \{ \begin{array}{ll} \big\{x\in \mathbb{R} : x + \int_0 ^t (f(s)-y)ds \in g^{-1}(y) \; \forall t \in \mathbb{R}\big\} \subset g^{-1}(y), & y \in g(\mathbb{R}),\\ \emptyset, & y \notin g(\mathbb{R}). \end{array} \right.$$ \begin{prop} %\label{} Let $g$ be an increasing function and $f$ periodic. Then equation (\ref{1D}) has different periodic solutions if and only if $Int (\mathcal{O}\langle f\rangle) \neq \emptyset$. \end{prop} \paragraph{Proof} Assume that (\ref{1D}) has two periodic solutions $x_1 \neq x_2$. By the previous proposition we have $x_2 - x_1 = C > 0$. 
By integration on $[0,T]$ one gets $$\label{I} \int_0 ^T g(x_1(t))dt = \int_0 ^T f(t)dt = \int_0 ^T g(x_2(t))dt.$$ Since $g$ is increasing we have $g(x_1(t)) \leq g(x_2(t)), \quad t\in \mathbb{R}$ and therefore, $$\label{II} \int_0 ^T g(x_1(t))dt \leq \int_0 ^T g(x_2(t))dt.$$ From (\ref{I}) and (\ref{II}) we deduce that $g(x_1(t)) = g(x_2(t)), \, t \in \mathbb{R}$ and thus $g$ is constant on each interval $[x_1(t), x_2(t)] = [x_1(t), x_1(t) + C], \,t \in \mathbb{R}$. Finally it implies that $g$ is constant on $\mathop{\rm Range}(x_1) + [0,C] = \{x_1(t) + y : t \in [0,T], y \in [0,C]\}$ and this constant is exactly the time average of $f$: \begin{equation*} g(x_1(t)) = g(x_2(t)) = \langle f\rangle, \quad t \in [0,T]. \end{equation*} Let $x$ be an arbitrary real number in $]x_1(0), x_1(0) + C[$. Then \begin{eqnarray} x + \int_0 ^t \{f(s) - \langle f\rangle\}ds & = & x - x_1(0) + x_1(0) + \int_0 ^t \{f(s) - g(x_1(s))\}ds \nonumber \\ & = & x - x_1(0) + x_1(t) \nonumber \\ &> & x_1(t), \quad t \in \mathbb{R}.\nonumber \end{eqnarray} Similarly, \begin{eqnarray} x + \int_0 ^t \{f(s) - \langle f\rangle\}ds & = & x - x_2(0) + x_2(0) + \int_0 ^t \{f(s) - g(x_2(s))\}ds \nonumber \\ & = & x - x_2(0) + x_2(t) \nonumber \\ & < & x_2(t), \quad t \in \mathbb{R}.\nonumber \end{eqnarray} Therefore, $x + \int_0 ^t \{f(s) - \langle f\rangle\}ds \in ]x_1(t), x_2(t)[ \subset g^{-1}(\langle f\rangle), \quad t \in \mathbb{R}$ which implies that $x \in \mathcal{O}\langle f\rangle$ and hence $]x_1(0), x_2(0)[ \subset \mathcal{O}\langle f\rangle$.\\ Conversely, suppose that there is $x$ and $C>0$ small enough such that $x, x+C \in \mathcal{O}\langle f\rangle$. It is easy to check that $x_1, x_2$ given below are different periodic solutions for (\ref{1D}): \begin{gather*} x_1(t) = x + \int_0 ^t \{f(s) - \langle f\rangle\}ds,\quad t \in \mathbb{R}, \\ x_2(t) = x + C + \int_0 ^t \{f(s) - \langle f\rangle\}ds = x_1(t) + C,\quad t \in \mathbb{R}. 
\end{gather*} \begin{remark} \rm The condition $Int(\mathcal{O}\langle f\rangle) \neq \emptyset$ is equivalent to $$\mathop{\rm diam}(g^{-1}\langle f\rangle) > \mathop{\rm diam} (\mathop{\rm Range}\int \{f(t) - \langle f\rangle\}dt ).$$ \end{remark} \paragraph{Example:} Consider the equation $x'(t) + g(x(t)) = \eta \cos t, t \in \mathbb{R}$ with $g$ given in Remark \ref{MultiSol}. We have $\langle \eta \cos t \rangle = 0 \in g(\mathbb{R})$ and \begin{eqnarray} \mathcal{O}(0) & = & \{ x \in \mathbb{R} \,| \, x + \int_0 ^t \eta \cos s \,ds \in g ^{-1}(0), \quad t \in \mathbb{R} \}\nonumber \\ & = & \{ x \in \mathbb{R} : x + \eta \sin t \in g^{-1}(0), \quad t \in \mathbb{R}\} \nonumber \\ & = & \{ x \in \mathbb{R} : -\varepsilon \leq x + \eta \sin t \leq \varepsilon, t \in \mathbb{R} \} \nonumber \\ & = & \left \{ \begin{array}{ll} \emptyset & |\eta |> \varepsilon,\\ \{0\} & |\eta |= \varepsilon,\\ \left [ |\eta |- \varepsilon, \varepsilon - |\eta |\right ] & | \eta |< \varepsilon. \end{array} \right. \end{eqnarray} Therefore, uniqueness does not occur if $|\eta |< \varepsilon$, for example if $\eta = \varepsilon/ 2$, as seen before in Remark \ref{MultiSol}. If $|\eta |\geq \varepsilon$ there is a unique periodic solution. In the following we suppose that $g$ is increasing and we establish an existence result. \subsection{Existence} To study the existence, note that a necessary condition is given by the following proposition. \begin{prop} \label{Prop1DNecCond} Assume that equation (\ref{1D}) has $T$-periodic solutions. Then there is $x_0\in \mathbb{R}$ such that $\langle f\rangle:=\frac{1}{T}\int_0 ^T f(t)dt = g(x_0)$. \end{prop} \paragraph{Proof} Integrating on a period interval $[0,T]$ we obtain \begin{equation*} %\label{} x(T) - x(0) + \int_0 ^ T g(x(t))dt = \int_0 ^ T f(t)dt. 
\end{equation*} Since $x$ is periodic and $g\circ x$ is continuous we get \begin{equation*} %\label{} Tg(x(\tau)) = \int_0 ^T f(t)dt, \quad \tau \in ]0,T[, \end{equation*} and hence $$\label{1DNecCond} \langle f\rangle := \frac{1}{T}\int_0 ^T f(t)dt \in \mathop{\rm Range}(g).$$ \quad\hfill$\diamondsuit$ In the following we will show that this condition is also sufficient for the existence of periodic solutions. We will prove this result in several steps. First we establish the existence for the equation $$\label{1DAbs} \alpha x_\alpha (t) + x_\alpha '(t) + g(x_\alpha(t)) = f(t), \quad t \in \mathbb{R},\quad \alpha >0.$$ \begin{prop} %\label{} Suppose that $g$ is increasing Lipschitz continuous and $f$ is $T$-periodic and continuous. Then for every $\alpha > 0$ the equation (\ref{1DAbs}) has exactly one periodic solution. \end{prop} \begin{remark} \rm Before starting the proof let us observe that (\ref{1DAbs}) reduces to an equation of type (\ref{1D}) with $g_\alpha = \alpha 1_{\mathbb{R}} + g$. Since $g$ is increasing, is clear that $g_\alpha$ is strictly increasing and by the Proposition \ref{1D_Uniq} we deduce that the uniqueness holds. Moreover since $\mathop{\rm Range}(g_\alpha) = \mathbb{R}$, the necessary condition (\ref{1DNecCond}) is trivially verified and therefore, in this case we can expect to prove existence. \end{remark} \paragraph{Proof} First of all remark that the existence of periodic solutions reduces to finding $x_0 \in \mathbb{R}$ such that the solution of the evolution problem $$\label{1DPerEvEq} \begin{gathered} \alpha x_\alpha (t) + x_\alpha '(t) + g(x_\alpha(t)) = f(t), \quad t \in [0,T],\\ x(0) = x_0, \end{gathered}$$ verifies $x(T\,; 0 , x_0) = x_0$. Here we denote by $x(\cdot\,; 0, x_0)$ the solution of (\ref{1DPerEvEq}) (existence and uniqueness assured by Picard's theorem). 
We define the map $S:\mathbb{R} \to \mathbb{R}$ given by $$\label{1Dmap} S(x_0) = x(T\,; 0, x_0), \quad x_0 \in \mathbb{R}.$$ We demonstrate the existence and uniqueness of the periodic solution of (\ref{1DPerEvEq}) by showing that Banach's fixed point theorem applies. Let us consider two solutions of (\ref{1DPerEvEq}) corresponding to the initial data $x_0 ^1$ and $x_0 ^2$. Using the monotony of $g$ we can write $$\alpha |x(t\,;0, x_0^1) - x(t\,;0, x_0^2) | ^2 + \frac{1}{2}\frac{d}{dt}|x(t\,;0,x_0^1) - x(t\,;0,x_0^2) |^2 \leq 0, \nonumber$$ which implies $$\frac{1}{2}\frac{d}{dt}\{e ^{2\alpha t} |x(t\,;0,x_0^1) - x(t\,;0,x_0^2)|^2\} \leq0,\nonumber$$ and therefore, $$|S(x_0^1) - S(x_0^2) | = |x(T\,;0,x_0^1) - x(T\,;0,x_0^2)| \leq e^{-\alpha T} |x_0^1 - x_0^2|.\nonumber$$ For $\alpha > 0$, $S$ is a contraction and Banach's fixed point theorem applies. Therefore $S(x_0) = x_0$ for a unique $x_0 \in \mathbb{R}$ and hence $x(\cdot\,;0,x_0)$ is a periodic solution of (\ref{1DAbs}).\hfill$\diamondsuit$ Naturally, in the following proposition we inquire about the convergence of $(x_\alpha)_{\alpha > 0}$ to a periodic solution of (\ref{1D}) as $\alpha \to 0$. In view of the Proposition \ref{Prop1DNecCond} this convergence does not hold if (\ref{1DNecCond}) is not verified. Assume for the moment that (\ref{1D}) has at least one periodic solution. In this case convergence holds. \begin{prop} If equation (\ref{1D}) has at least one periodic solution, then $(x_\alpha)_{\alpha > 0}$ is convergent in $C^0(\mathbb{R};\mathbb{R})$ and the limit is also a periodic solution of (\ref{1D}). \end{prop} \paragraph{Proof} Denote by $x$ a periodic solution of (\ref{1D}). 
By elementary calculations we find $$\alpha |x_\alpha (t) - x(t) |^2 + \frac{1}{2}\frac{d}{dt}|x_\alpha(t) - x(t) |^2 \leq -\alpha x(t)(x_\alpha(t) - x(t)),\quad t \in \mathbb{R},$$ which can be also written as $$\frac{1}{2}\frac{d}{dt}\{e^{2\alpha t} |x_\alpha (t) - x(t)|^2\} \leq \alpha e^{\alpha t } |x(t)|\cdot e^{\alpha t}|x_\alpha(t) - x(t)|, \quad t \in \mathbb{R}.$$ Therefore, by integration on $[0,t]$ we deduce $$\label{int} \frac{1}{2} \{e^{\alpha t}|x_\alpha(t) - x(t)|\}^2 \leq \frac{1}{2}|x_\alpha(0)- x(0)|^2 + \int_0 ^t \alpha e^{\alpha s}|x(s)| \cdot e^ {\alpha s}|x_\alpha(s) - x(s)|ds.$$ Using Bellman's lemma, formula (\ref{int}) gives $$\label{1DEstim} e^{\alpha t} |x_\alpha (t) - x(t) | \leq |x_\alpha (0) - x(0) | + \int_0 ^ t \alpha e ^{\alpha s}|x(s)|ds, \quad t \in \mathbb{R}.$$ Let us consider $\alpha > 0$ fixed for the moment. Since $x$ is periodic and continuous, it is also bounded and therefore from (\ref{1DEstim}) we get $$|x_\alpha(t) - x(t) |\leq e ^ {-\alpha t } |x_\alpha(0) - x(0)| + ( 1 - e^{-\alpha t})\|x\|_{L^{\infty}(\mathbb{R})}, \quad t \in \mathbb{R}.$$ By periodicity we have \begin{eqnarray} |x_\alpha (t) - x(t) | & = & |x_\alpha (nT + t) - x(nT + t) | \nonumber \\ & \leq & e ^ {-\alpha (nT + t)}|x_\alpha (0) - x(0)| + ( 1 - e ^{-\alpha (nT + t)})\|x\|_{L^{\infty}(\mathbb{R})}\nonumber \\ & \leq & e ^ {-\alpha (nT+t)}|x_\alpha (0) - x(0)| + \|x\|_{L^{\infty}(\mathbb{R})}, \quad t \in \mathbb{R}, n \geq 0.\nonumber \end{eqnarray} By passing to the limit as $n \to \infty$, we deduce that $(x_\alpha)_{\alpha > 0}$ is uniformly bounded in $L^{\infty}(\mathbb{R})$: $$|x_\alpha (t) | \leq |x_\alpha (t) - x(t) | + |x(t)| \leq 2\|x\|_{L^{\infty}(\mathbb{R})}, \quad t \in \mathbb{R}, \; \alpha > 0.$$ The derivatives $x'_\alpha$ are also uniformly bounded in $L^{\infty}(\mathbb{R})$ for $\alpha \to 0$: \begin{eqnarray*} \lefteqn{|x'_\alpha(t)| } \\ &= & |f(t) - \alpha x_\alpha(t) - g(x_\alpha (t))| \\ &\leq & \|f\|_{L^{\infty}(\mathbb{R})} 
+ 2\alpha \|x\|_{L^{\infty}(\mathbb{R})} + \max \{g(2\|x\|_{L^{\infty}(\mathbb{R})}), -g(-2\|x\|_{L^{\infty}(\mathbb{R})})\}. \end{eqnarray*} The uniform convergence of $(x_\alpha)_{\alpha > 0}$ follows now from the Arzela-Ascoli's theorem. Denote by $u$ the limit of $(x_\alpha)_{\alpha > 0}$ as $\alpha \to 0$. Obviously $u$ is also periodic $$u(0) = \lim_{\alpha \to 0 } x_\alpha(0) = \lim_{\alpha \to 0 }x_\alpha(T) = u(T).$$ To prove that $u$ verifies (\ref{1D}), we write $$x_\alpha(t) = x_\alpha(0) + \int_0 ^t \{f(s) - g(x_\alpha (s) ) -\alpha x_\alpha(s)\}ds, \quad t \in \mathbb{R}.$$ Since the convergence is uniform, by passing to the limit for $\alpha \to 0$ we obtain $$u(t) = u(0) + \int_0 ^t \{f(s) - g(u(s))\}ds,$$ and hence $u \in C^1(\mathbb{R};\mathbb{R})$ and $$u'(t) + g(u(t)) = f(t), \quad t \in \mathbb{R}.$$ From the previous proposition we conclude that the existence of periodic solutions for (\ref{1D}) reduces to uniform estimates in $L^{\infty}(\mathbb{R})$ for $(x_\alpha)_{\alpha >0}$. \begin{prop} \label{UnifBound} Assume that $g$ is increasing Lipschitz continuous and $f$ is $T$-periodic and continuous. Then the following statements are equivalent:\\ (i) equation (\ref{1D}) has periodic solutions;\\ (ii) the sequence $(x_\alpha)_{\alpha >0}$ is uniformly bounded in $L^{\infty}(\mathbb{R})$. Moreover, in this case $(x_\alpha)_{\alpha >0}$ is convergent in $C^0(\mathbb{R};\mathbb{R})$ and the limit is a periodic solution for (\ref{1D}). \end{prop} Note that generally we can not estimate $(x_\alpha)_{\alpha >0}$ uniformly in $L^{\infty}(\mathbb{R})$. 
Indeed, by standard computations we obtain $$\alpha (x_\alpha(t) - u) ^2 + \frac{1}{2}\frac{d}{dt}(x_\alpha(t) - u )^2\leq |f(t) - \alpha u - g(u)|\cdot |x_\alpha (t) - u|,\quad t ,u \in \mathbb{R}$$ and therefore $$\frac{1}{2}\frac{d}{dt}\{e^{2\alpha t}(x_\alpha (t) - u )^2\}\leq e^{\alpha t} |f(t) - \alpha u -g(u)| \cdot e^{\alpha t} |x_\alpha (t) - u|,\quad t,u\in \mathbb{R}.$$ Integration on $[t,t+h]$, we get \begin{eqnarray} \frac{1}{2}e^{2\alpha(t+h)}(x_\alpha (t+h) - u)^2 & \leq & \int_t ^{t+h} e^{2\alpha s}|f(s) - \alpha u -g(u)|\cdot |x_\alpha(s) - u|ds \nonumber \\ & &+ \frac{1}{2}e ^ {2\alpha t} (x_\alpha (t) - u ) ^2, \quad t < t+h, \; u \in \mathbb{R}.\nonumber \end{eqnarray} Now by using Bellman's lemma we deduce $$|x_\alpha(t+h)- u|\leq e ^{-\alpha h}|x_\alpha (t) - u|+ \int_t ^{t+h}\!\!\!\!e^{-\alpha (t+h-s)}|f(s)- \alpha u- g(u)|ds, \quad t < t+h.$$ Since $x_\alpha$ is $T$-periodic, by taking $h=T$ we can write $$|x_\alpha (t) - u |\leq \frac{1}{1- e ^{-\alpha T}}\int_0 ^T e^{-\alpha (T-s)} |f(s) - \alpha u - g(u)|ds,\quad t \in \mathbb{R},$$ and thus for $u=0$ we obtain $$\|x_\alpha\|_{L^{\infty}(\mathbb{R})} \leq \frac{1}{1-e ^{-\alpha T}}\int_0 ^T|f(s)-g(0)|ds\sim \mathcal{O} \left (\frac{1}{\alpha} \right ), \quad \alpha >0.$$ Now we can state our main existence result. \begin{theorem} \label{1D_NecAndSufCond} Assume that $g$ is increasing Lipschitz continuous, and $f$ is $T$-periodic and continuous. Then equation (\ref{1D}) has periodic solutions if and only if $\langle f\rangle:= \frac{1}{T}\int_0 ^Tf(t)dt \in \mathop{\rm Range}(g)$ (there is $x_0 \in \mathbb{R}$ such that $\langle f\rangle=g(x_0)$). 
Moreover in this case we have the estimate $$\|x\|_{L^{\infty}(\mathbb{R})}\leq |x_0|+ \int_0 ^T |f(t) - \langle f\rangle|dt, \quad \forall \; x_0 \in g^{-1}\langle f\rangle,$$ and the solution is unique if and only if $Int (\mathcal{O}\langle f\rangle) = \emptyset$ or $$\mathop{\rm diam} ( g^{-1}\langle f\rangle) \leq \mathop{\rm diam} (\mathop{\rm Range} \int \{f(t)-\langle f\rangle\}dt).$$ \end{theorem} \paragraph{Proof} The condition is necessary (see Proposition \ref{Prop1DNecCond}). We will prove now that it is also sufficient. Let us consider the sequence of periodic solutions $(x_\alpha)_{\alpha > 0}$ of (\ref{1DAbs}). Accordingly to the Proposition \ref{UnifBound} we need to prove uniform estimates in $L^{\infty}(\mathbb{R})$ for $(x_\alpha)_{\alpha > 0}$. Since $x_\alpha$ is $T$-periodic by integration on $[0,T]$ we get $$\int_0 ^T \{\alpha x_\alpha (t) + g(x_\alpha (t))\}dt = T\langle f\rangle, \quad \alpha >0.$$ Using the average formula for continuous functions we have $$\int_0 ^T\{\alpha x_\alpha (t) + g (x_\alpha(t)) \}dt = T \{ \alpha x_\alpha(t_\alpha) + g(x_\alpha(t_\alpha)) \}, \quad t_\alpha \in ]0,T[, \; \alpha >0.$$ By the hypothesis there is $x_0 \in \mathbb{R}$ such that $\langle f\rangle = g(x_0)$ and thus $$\label{Rezolv} \alpha x_\alpha(t_\alpha) + g(x_\alpha (t_\alpha)) = g(x_0), \quad \alpha >0.$$ Since $g$ is increasing, we deduce $$\alpha x_\alpha(t_\alpha) [x_0 - x_\alpha (t_\alpha)] = [g(x_0) - g(x_\alpha (t_\alpha))] [x_0 - x_\alpha (t_\alpha)] \geq 0,\quad \alpha >0,$$ and thus $$|x_\alpha(t_\alpha)|^2 \leq x_\alpha (t_\alpha) x_0 \leq |x_\alpha(t_\alpha)||x_0|.$$ Finally we deduce that $x_\alpha(t_\alpha)$ is uniformly bounded in $\mathbb{R}$: $$|x_\alpha (t_\alpha)| \leq |x_0|, \quad \forall \; \alpha > 0.$$ Now we can easily find uniform estimates in $L^{\infty}(\mathbb{R})$ for $(x_\alpha)_{\alpha >0}$. 
Let us take in the previous computations $u = x_\alpha (t_\alpha)$ and integrate on $[t_\alpha, t]$: $$\frac{1}{2}e^{2\alpha t}(x_\alpha(t) - x_\alpha(t_\alpha))^2 \leq \int _{t_\alpha} ^t e^{2\alpha s}|f(s) - \alpha x_\alpha(t_\alpha) - g(x_\alpha(t_\alpha))|\cdot |x_\alpha(s) - x_\alpha (t_\alpha)|ds.$$ By using Bellman's lemma we get $$|x_\alpha(t) - x_\alpha(t_\alpha)| \leq \int_{t_\alpha} ^t e^{-\alpha (t-s)}|f(s) - \alpha x_\alpha(t_\alpha) - g(x_\alpha(t_\alpha))|ds,\quad t > t_\alpha,$$ and hence by (\ref{Rezolv}) we deduce \begin{eqnarray} \label{Lab} |x_\alpha(t)| & \leq & |x_0| + \int_0 ^T |f(t) - \alpha x_\alpha (t_\alpha) - g(x_\alpha (t_\alpha ))|dt \nonumber \\ & = & |x_0| + \int_0 ^T|f(t) - \langle f\rangle|dt,\quad t \in \mathbb{R}, \alpha > 0. \end{eqnarray} Now by passing to the limit in (\ref{Lab}) we get $$|x(t)|\leq |x_0| + \int_0 ^T |f(t) - \langle f\rangle|dt, \quad t \in \mathbb{R}, \; \forall \;x_0 \in g^{-1}\langle f\rangle.$$ \subsection{Sub(super)-periodic solutions} In this part we generalize the previous existence results for sub(super)-periodic solutions. We will see that similar results hold. Let us introduce the concept of sub(super)-periodic solutions. \begin{defi} \label{SubPer} \rm We say that $x \in C^1([0,T]; \mathbb{R})$ is a sub-periodic solution for (\ref{1D}) if $$x'(t) + g(x(t)) = f(t), \quad t \in [0,T],$$ and $x(0) \leq x(T)$. \end{defi} Note that a necessary condition for the existence is given next. \begin{prop} \label{SubNecCond} If equation (\ref{1D}) has sub-periodic solutions, then there is $x_0\in \mathbb{R}$ such that $g(x_0) \leq \,\,\langle f\rangle$. \end{prop} \paragraph{Proof} Let $x$ be a sub-periodic solution of (\ref{1D}). 
By integration on $[0,T]$ we find $$x(T) - x(0) + \int_0 ^T g(x(t))dt = T\langle f\rangle.$$ Since $g\circ x$ is continuous, there is $\tau \in ]0,T[$ such that $$g(x(\tau))= \langle f\rangle - \frac{1}{T}(x(T)-x(0)) \leq \,\,\langle f\rangle.$$ Similarly we define the notion of super-periodic solution. \begin{defi} \label{SuperPer} \rm We say that $y \in C^1([0,T]; \mathbb{R})$ is a super-periodic solution for (\ref{1D}) if $$y'(t) + g(y(t)) = f(t), \quad t \in [0,T],$$ and $y(0) \geq y(T)$. \end{defi} The analogous necessary condition holds. \begin{prop} \label{SuperNecCond} If equation (\ref{1D}) has super-periodic solutions, then there is $y_0\in \mathbb{R}$ such that $g(y_0) \geq \,\,\langle f\rangle$. \end{prop} \begin{remark} \rm It is clear that $x$ is a periodic solution for (\ref{1D}) if and only if it is at the same time a sub-periodic and a super-periodic solution. Therefore there are $x_0,y_0 \in \mathbb{R}$ such that $$g(x_0) \leq \,\,\langle f\rangle \,\,\leq g(y_0).$$ Since $g$ is continuous, we deduce that $\langle f\rangle \in \mathop{\rm Range}(g)$ which is exactly the necessary condition given by the Proposition \ref{Prop1DNecCond}. \end{remark} As before we will prove that the necessary condition of Proposition \ref{SubNecCond} is also sufficient for the existence of sub-periodic solutions. \begin{theorem} \label{SubSufCond} Assume that $g$ is increasing Lipschitz continuous and $f$ is $T$-periodic continuous. Then equation (\ref{1D}) has sub-periodic solutions if and only if there is $x_0 \in \mathbb{R}$ such that $g(x_0)\leq \langle f\rangle$. \end{theorem} \paragraph{Proof} The condition is necessary (see Proposition \ref{SubNecCond}). Let us prove now that it is also sufficient. Consider $z_0$ an arbitrary initial data and denote by $x:[0,\infty[\to\mathbb{R}$ the solution for (\ref{1D}) with the initial condition $x(0) = z_0$. 
If there is $t_0 \geq 0$ such that $x(t_0) \leq x(t_0 + T)$, thus $x_{t_0}(t) := x(t_0 + t),\, t \in [0,T]$ is a sub-periodic solution. Suppose now that $x(t) > x(t+T), \, \forall t \in \mathbb{R}$. By integration on $[nT,(n+1)T]$, $n\geq 0$ we get $$x((n+1)T) - x(nT) + \int_0 ^T g(x(nT + t))dt = T\langle f\rangle, \, n \geq 0.$$ Using the hypothesis and the average formula we have $$g(x(nT+\tau_n)) = \langle f\rangle + \frac{1}{T}\{x(nT) - x((n+1)T)\} > g(x_0),$$ for $\tau_n\in ]0,T[$ and $n \geq 0$. Since $g$ is increasing we deduce that $x(nT + \tau_n ) > x_0, \, n \geq 0$. We have also $x(nT+\tau_n) \leq x((n-1)T + \tau_n) \leq \cdots \leq x(\tau_n) \leq \sup_{t \in [0,T]}|x(t)|$ and thus we deduce that $(x(nT+\tau_n))_{n\geq 0 }$ is bounded: $$|x(nT+\tau_n)| \leq K, \quad n \geq 0.$$ Consider now the functions $x_n:[0,T]\to\mathbb{R}$ given by $$x_n(t) = x(nT+t), \quad t \in [0,T].$$ By a standard computation we get $$\frac{1}{2}\frac{d}{dt}|x_n(t)|^2 + [g(x_n(t)) - g(0)]x_n(t) = [f(t) - g(0)]x_n(t), \quad t \in [0,T].$$ Using the monotony of $g$ we obtain $$|x_n(t)| \leq |x_n(s) |+ \int_s ^t |f(u) - g(0)|du, \quad 0\leq s \leq t \leq T.$$ Taking $s=\tau_n \in ]0,T[$ we can write $$|x_n(t)|\leq |x_n(\tau_n)| + \int_{\tau_n} ^t |f(u) - g(0)|du \leq K + \int_0 ^T |f(u) - g(0)|du, \; t \in [\tau_n ,T].$$ For $t \in [0,\tau_n]$, $n \geq 1$ we have \begin{eqnarray} |x_n(t)| = |x(nT+ t)| & \leq & |x((n-1)T + \tau_{n-1})|+ \int_{(n-1)T+ \tau _{n-1}} ^ {nT + t }|f(u) - g(0)|du \nonumber \\ & \leq & K + \int_{(n-1)T} ^ {(n+1)T}|f(u) - g(0)|du \nonumber \\ & \leq & K + 2\int_0 ^T |f(u) - g(0)|du.\nonumber \end{eqnarray} Therefore, the sequence $(x_n)_{n \geq 0}$ is uniformly bounded in $L^{\infty}(\mathbb{R})$ and $$\|x_n\|_{L^{\infty}(\mathbb{R})} \leq K + 2\int_0 ^T |f(t) - g(0)|dt := M.$$ Moreover, $(x_n')_{n\geq 0}$ is also uniformly bounded in $L^{\infty}(\mathbb{R})$. 
Indeed we have $$|x_n '(t)|= |f(t) - g(x_n(t))|\leq \|f\|_{L^{\infty}(\mathbb{R})} + \max \{g(M), -g(-M)\},$$ and hence, by Arzela-Ascoli's theorem we deduce that $(x_n)_{n\geq 0}$ converges in $C^0([0,T],\mathbb{R})$: $$\lim_{n \to \infty} x_n(t) = u(t), \, \mbox{uniformly for } t \in [0,T].$$ As usual, by passing to the limit for $n \to \infty$ we find that $u$ is also solution for (\ref{1D}). Moreover since $(x(nT))_{n \geq 0 }$ is decreasing and bounded, it is convergent and we can prove that $u$ is periodic: $$u(0) = \lim_{n \to \infty} x_n(0) = \lim_{n \to \infty} x(nT) = \lim_{n \to \infty} x((n+1)T) = \lim_{n \to \infty} x_n(T) = u(T).$$ Therefore, $u$ is a sub-periodic solution for (\ref{1D}). An analogous result holds for super-periodic solutions. \begin{prop} \label{SuperSufCond} Under the same assumptions as in Theorem \ref{SubSufCond} the equation (\ref{1D}) has super-periodic solutions if and only if there is $y_0\in \mathbb{R}$ such that $g(y_0) \geq \langle f\rangle$. \end{prop} We state now a comparison result between sub-periodic and super-periodic solutions. \begin{prop}\label{Compar} If $g$ is increasing, $x$ is a sub-periodic solution and $y$ is a super-periodic solution we have $$x(t) \leq y(t), \quad \forall t \in [0,T],$$ provided that $x$ and $y$ are not both periodic. \end{prop} \paragraph{Proof} Both $x$ and $y$ verify (\ref{1D}), thus $$(x-y)'(t) + g(x(t))-g(y(t)) = 0, \quad t \in [0,T].$$ With the notation \begin{eqnarray} \label{R} r(t) = \left \{ \begin{array}{ll} \frac{g(x(t)) - g(y(t))}{x(t) - y(t)} & t \in [0,T], \, x(t) \neq y(t)\\ 0 & t \in [0,T], \, x(t) = y(t), \end{array} \right. \end{eqnarray} we can write $g(x(t)) - g(y(t)) = r(t) ( x(t) - y(t))$, $t \in [0,T]$ and therefore, $$(x-y)'(t) + r(t) (x(t) - y(t))= 0, \, t \in [0,T]$$ which implies $$\label{Exp} x(t) - y(t) = (x(0) - y(0)) e ^ {-\int_0 ^t r(s)ds}.$$ Now it is clear that if $x(0) \leq y(0)$ we also have $x(t) \leq y(t), \, t \in [0,T]$. 
Suppose now that $x(0) > y(0)$. Taking $t=T$ in (\ref{Exp}) we obtain $$\label{Exp1} x(T) - y(T) = (x(0) - y(0)) e ^ { -\int_0 ^ T r(t)dt}.$$ Since $g$ is increasing, by the definition of the function $r$ we have $r\geq 0$. Two cases are possible: (i) either $\int_0 ^T r(t)dt > 0$, (ii) or $\int_0 ^T r(t)dt = 0$ in which case $r(t) = 0, \, t \in [0,T]$ ($r$ vanishes at all points of continuity $t$ such that $x(t) \neq y(t)$ and also at all points $t$ with $x(t) = y(t)$ by the definition). Let us analyse the first case (i). By (\ref{Exp1}) we deduce that $x(T) - y(T) < x(0) - y(0)$ or $x(T) - x(0) < y(T) - y(0)$. Since $x$ is sub-periodic we have $x(0) \leq x(T)$ which implies that $y(T) > y(0)$ which is in contradiction with the super-periodicity of $y$ ($y(T) \leq y(0)$).\\ In the second case (ii) we have $g(x(t)) = g(y(t)), \, t \in [0,T]$ so $(x-y)'=0$ and therefore there is a constant $C\in \mathbb{R}$ such that $x(t) = y(t) + C, \, t \in [0,T]$. Taking $t=0$ and $t = T$ we obtain $$0\geq x(0) - x(T) = y(0) - y(T) \geq 0,$$ and thus $x$ and $y$ are both periodic which is in contradiction with the hypothesis. In the following we will see how it is possible to retrieve the existence result for periodic solutions by using the method of sub(super)-periodic solutions. Suppose that $\langle f\rangle \in \mathop{\rm Range}(g)$. Obviously both sufficient conditions for existence of sub(super)-periodic solutions are satisfied and thus there are $x_0(y_0)$ sub(super)-periodic solutions. If $y_0$ is itself periodic the proof is complete. Assume that $y_0$ is not periodic ($y_0(0) > y_0(T)$). Denote by $\mathcal{M}$ the set of sub-periodic solutions for (\ref{1D}): $$\mathcal{M} = \{x:[0,T]\to \mathbb{R} : x \mbox{ sub-periodic solution }, \quad x_0(t) \leq x(t), \; t \in [0,T]\}.$$ Since $x_0 \in \mathcal{M}$ we have $\mathcal{M} \neq \emptyset$. 
Moreover, from the comparison result since $y_0$ is super-periodic but not periodic we have $x\leq y_0, \, \forall x \in \mathcal{M}$. We prove that $\mathcal{M}$ contains a maximal element in respect to the order: $$x_1 \prec x_2 \mbox{ (if and only if) } x_1(t) \leq x_2(t), \quad t \in [0,T].$$ Finally we show that this maximal element is even a periodic solution for (\ref{1D}) since otherwise it would be possible to construct a sub-periodic solution greater than the maximal element. We state now the following generalization. \begin{theorem} \label{1DGen} Assume that $g : \mathbb{R} \times \mathbb{R} \to \mathbb{R}$ is increasing Lipschitz continuous function in $x$, $T$-periodic and continuous in $t$ and $f:\mathbb{R} \to \mathbb{R}$ is $T$-periodic and continuous in $t$. Then the equation $$\label{GenEq} x'(t) + g(t,x(t)) = f(t), \quad t \in \mathbb{R},$$ has periodic solutions if and only if there is $x_0 \in \mathbb{R}$ such that $$\label{GenNecAndSufCond} \langle f\rangle:= \frac{1}{T}\int_0 ^T f(t)dt = \frac {1}{T}\int_0 ^T g(t,x_0)dt = G(x_0).$$ Moreover, in this case we have the estimate $$\|x\|_{L^{\infty}(\mathbb{R})} \leq |x_0| + \int_0 ^T |f(t) - g(t,x_0)|dt, \quad \forall \; x_0 \in G^{-1}\langle f\rangle.$$ \end{theorem} \paragraph{Proof} Consider the average function $G:\mathbb{R} \to \mathbb{R}$ given by $$G(x) = \frac{1}{T}\int_0 ^Tg(t,x)dt, \, x \in \mathbb{R}.$$ It is easy to check that $G$ is also increasing and Lipschitz continuous with the same constant. Let us prove that the condition (\ref{GenNecAndSufCond}) is necessary. Suppose that $x$ is a periodic solution for (\ref{GenEq}). 
By integration on $[0,T]$ we get $$\label{Ave} \frac{1}{T}\int_0 ^T g(t,x(t))dt = \langle f\rangle.$$ We can write $$m \leq x(t) \leq M, \, t \in [0,T],$$ and thus $$g(t,m) \leq g(t,x(t)) \leq g(t,M), \, t \in [0,T],$$ which implies $$G(m) = \frac{1}{T}\int_0 ^Tg(t,m)dt \leq \frac{1}{T}\int_0 ^T g(t,x(t))dt \leq \frac{1}{T}\int_0 ^Tg(t,M)dt = G(M).$$ Since $G$ is continuous it follows that there is $x_0 \in [m,M]$ such that $G(x_0) = \frac{1}{T}\int_0 ^T g(t,x(t))dt$ and from (\ref{Ave}) we deduce that $\langle f\rangle = G(x_0)$.\\ Let us show that the condition (\ref{GenNecAndSufCond}) is also sufficient. As before let us consider the unique periodic solution for $$\alpha x_\alpha (t ) + x_\alpha '(t) + g(t,x_\alpha(t)) = f(t), \quad t \in[0,T], \; \alpha >0,$$ (existence and uniqueness follow by the Banach's fixed point theorem exactly as before). All we need to prove is that $(x_\alpha)_{\alpha > 0}$ is uniformly bounded in $L^{\infty}(\mathbb{R})$ (then $(x_\alpha ')_{\alpha > 0}$ is also uniformly bounded in $L^{\infty}(\mathbb{R})$ and by Arzela-Ascoli's theorem we deduce that $x_\alpha$ converges to a periodic solution for (\ref{GenEq})). 
Taking the average on $[0,T]$ we get $$\frac{1}{T}\int_0 ^T \{\alpha x_\alpha(t) + g(t,x_\alpha (t))\}dt = \langle f\rangle = G(x_0), \quad \alpha >0.$$ As before we can write $$\alpha m_\alpha + g(t,m_\alpha) \leq \alpha x_\alpha(t) + g(t, x_\alpha(t)) \leq \alpha M_{\alpha} + g(t,M_\alpha), \quad t \in [0,T],\; \alpha >0,$$ where $$m_\alpha \leq x_\alpha (t) \leq M_\alpha, \quad t \in [0,T], \; \alpha >0,$$ and hence $$\alpha m_\alpha + G(m_\alpha) \leq \frac{1}{T}\int_0 ^T \{\alpha x_\alpha (t) + g(t,x_\alpha(t))\}dt \leq \alpha M_\alpha + G(M_\alpha), \quad \alpha >0.$$ Finally we get $$\label{GenAve} G(x_0) = \frac{1}{T}\int_0 ^T \{ \alpha x_\alpha(t) + g(t, x_\alpha(t))\}dt = \alpha u_\alpha + G(u_\alpha), \quad u_\alpha \in ]m_\alpha, M_\alpha[, \; \alpha >0.$$ Multiplying by $u_\alpha - x_0$ we obtain $$\alpha u_\alpha (u_\alpha - x_0) = - (G(x_0) - G(u_\alpha)) (x_0 - u_\alpha), \quad \alpha >0.$$ Since $G$ is increasing we deduce that $|u_\alpha|^2 \leq u_\alpha x_0 \leq |u_\alpha| \cdot |x_0|$, $\alpha >0$ and hence $(u_\alpha)_{\alpha >0}$ is bounded: $$|u_\alpha |\leq |x_0|, \quad \alpha >0.$$ Now using (\ref{GenAve}) it follows $$\frac{1}{T}\int_0 ^T \{ \alpha x_\alpha (t) + g(t,x_\alpha(t)) \} dt = \frac{1}{T}\int_0 ^T \{ \alpha u_\alpha + g(t, u_\alpha) \}dt,$$ and thus there is $t_\alpha \in ]0,T[$ such that $$\alpha x_\alpha (t_\alpha) + g(t_\alpha , x_\alpha (t_\alpha)) = \alpha u_\alpha + g(t_\alpha , u_\alpha), \quad \alpha >0.$$ Since $\alpha (x_\alpha(t_\alpha) - u_\alpha ) ^2 = -[ g(t_\alpha , x_\alpha (t_\alpha)) - g(t_\alpha, u_\alpha) ][x_\alpha (t_\alpha) - u_\alpha] \leq 0$ we find that $x_\alpha (t_\alpha) = u_\alpha, \, \alpha > 0$ and thus $(x_\alpha (t_\alpha))_{\alpha > 0}$ is also bounded $$|x_\alpha (t_\alpha)| \leq |x_0|, \quad \alpha >0.$$ Now by standard calculations we can write \begin{eqnarray*} \lefteqn{\frac{1}{2}\frac{d}{dt}|x_\alpha (t) - x_\alpha ( t_\alpha) |^2 + [g(t,x_\alpha (t)) - g(t,x_\alpha (t_\alpha)) ][x_\alpha 
(t)- x_\alpha (t_\alpha )] } \\ & \leq& [f(t) -\alpha x_\alpha (t_\alpha) - g(t,x_\alpha (t_\alpha))][x_\alpha(t) - x_\alpha (t_\alpha)], \quad t \in \mathbb{R}, \nonumber \end{eqnarray*} and thus $$|x_\alpha (t) - x_\alpha (t_\alpha)| \leq \int_{t_\alpha} ^t |f(s) - \alpha x_\alpha (t_\alpha ) - g(s,x_\alpha (t_\alpha ))|ds, \quad t>t_\alpha, \; \alpha >0,$$ which implies $$\label{es} |x_\alpha (t) |\leq |x_0| + \int_0 ^T |f(t) - \alpha x_\alpha (t_\alpha) - g(t, x_\alpha (t_\alpha))|dt, \quad t \in [0,T], \; \alpha >0.$$ Since $(x_\alpha (t_\alpha))_{\alpha >0}$ is bounded we have $$u_\alpha = x_\alpha (t_\alpha)\to x_1,$$ such that $$G(x_0) = \lim_ {\alpha \to 0}\{\alpha u_\alpha + G(u_\alpha)\} = G(x_1).$$ Moreover, if $x_0 \leq x_1$ we have $$0 \leq \frac{1}{T}\int_0 ^T [g(t,x_1) - g(t,x_0)]dt = G(x_1) - G(x_0) = 0,$$ and hence $g(t,x_1) = g(t,x_0)$ for all $t \in [0,T]$. Obviously the same equalities hold if $x_0 > x_1$. Now by passing to the limit in (\ref{es}) we find \begin{eqnarray} |x(t)| & \leq & |x_0| + \int_0 ^T |f(t) - g(t,x_1)|dt\\ \nonumber & = & |x_0| + \int_0 ^T |f(t) - g(t,x_0)|dt, \quad t \in [0,T], \; \forall \; x_0 \in G^{-1}\langle f\rangle, \end{eqnarray} and therefore $(x_\alpha)_{\alpha >0}$ is uniformly bounded in $L^{\infty}(\mathbb{R})$. %\input 1D_PerEvEq.tex \section{Periodic solutions for evolution equations on Hilbert spaces } In this section we analyze the existence and uniqueness of periodic solutions for general evolution equations on Hilbert spaces $$\label{MD} x'(t) + Ax(t) = f(t), \quad t>0,$$ where $A:D(A)\subset H \to H$ is a maximal monotone operator on a Hilbert space $H$ and $f\in C^1(\mathbb{R};H)$ is a $T$-periodic function. As known by the theory of Hille-Yosida, for every initial data $x_0 \in D(A)$ there is an unique solution $x \in C^1([0,+\infty[; H) \cap C([0,+\infty[\;; D(A))$ for (\ref{MD}), see \cite[p. 101]{brezis}. Obviously, the periodic problem reduces to find $x_0 \in D(A)$ such that $x(T) = x_0$. 
As in the one dimensional case we demonstrate uniqueness for strictly monotone operators. We also state a necessary and sufficient condition for the existence in the linear symmetric case. Finally the case of non-linear sub-differential operators is considered. Let us start with the definition of periodic solutions for (\ref{MD}). \begin{defi} \label{MD_PerSol} Let $A:D(A)\subset H \to H$ be a maximal monotone operator on a Hilbert space $H$ and $f \in C^1(\mathbb{R};H)$ a $T$-periodic function. We say that $x\in C^1([0,T] ;H) \cap C([0,T] ; D(A))$ is a periodic solution for (\ref{MD}) if and only if $$x'(t) + Ax(t) = f(t), \quad t \in [0,T],$$ and $x(0) = x(T)$. \end{defi} \subsection{Uniqueness} Generally the uniqueness does not hold (see the example in the following paragraph). However, it occurs under the hypothesis of strict monotonicity. \begin{prop} \label{MD_Uniq} Assume that $A$ is strictly monotone ($(Ax_1 -Ax_2, x_1-x_2) = 0$ implies $x_1 = x_2$). Then (\ref{MD}) has at most one periodic solution. \end{prop} \paragraph{Proof} Let $x_1$, $x_2$ be two different periodic solutions. By taking the difference of (\ref{MD}) and multiplying both sides by $x_1(t) - x_2(t)$ we find $$\frac{1}{2}\frac{d}{dt}\|x_1(t)- x_2(t)\|^2 + (Ax_1(t) - Ax_2(t),\quad x_1(t) - x_2(t)) = 0, \quad t \in [0,T].$$ By the monotony of $A$ we deduce that $\|x_1 - x_2\|^2$ is decreasing and therefore we have $$\|x_1(0) - x_2(0)\| \geq \|x_1(t) - x_2(t)\| \geq \|x_1(T) - x_2(T)\|, \quad t \in [0,T].$$ Since $x_1$ and $x_2$ are $T$-periodic we have $$\|x_1(0) - x_2(0)\| = \|x_1(T) - x_2(T)\|,$$ which implies that $\|x_1(t) - x_2(t)\|$ is constant for $t \in [0,T]$ and thus $$(Ax_1(t) - Ax_2(t), \quad x_1(t) - x_2(t) ) = 0, \quad t \in [0,T].$$ Now uniqueness follows by the strict monotonicity of $A$. \subsection{Existence} In this section we establish existence results. In the linear case we state the following necessary condition. 
\begin{prop} \label{MD_NecCond} Let $A:D(A) \subset H \to H$ be a linear maximal monotone operator and $f\in L^1(]0,T[;H)$ a $T$-periodic function. If (\ref{MD}) has $T$-periodic solutions, then the following necessary condition holds. $$\langle f\rangle := \frac{1}{T}\int_0 ^T f(t)dt \in \mathop{\rm Range}(A),$$ (there is $x_0 \in D(A)$ such that $\langle f\rangle = Ax_0$). \end{prop} \paragraph{Proof} Suppose that $x\in C^1([0,T];H) \cap C([0,T];D(A))$ is a $T$-periodic solution for (\ref{MD}). Let us consider the divisions $\Delta_n : 0 = t_0^n < t_1 ^n <\dots< t_n ^n = T$ such that $$\label{Div} \lim_{n \to \infty} \max_{1 \leq i \leq n}|t_i ^n - t_{i-1}^n | = 0.$$ We can write $$(t_i ^n - t_{i-1}^n ) x'(t_{i-1}^n) + ( t_i ^n - t_{i-1}^n ) Ax(t_{i-1}^n) = (t_i ^n - t_{i-1} ^ n) f(t_{ i-1 } ^n ) , \quad 1\leq i \leq n.$$ Since $A$ is linear we deduce $$\frac{1}{T}\sum_{i=1} ^n ( t_i ^n - t_{ i-1 } ^n ) x'(t_{ i-1} ^n) + A\big( \frac{1}{T}\sum_{i = 1} ^ n (t_i ^n - t_{i-1} ^n )x(t_{i-1}^n )\big) =\frac{1}{T}\sum_{i=1} ^n (t_i ^n - t_{i-1} ^n ) f(t_{i-1} ^ n),$$ and hence $$\big[ \frac{1}{T}\sum_{i = 1} ^ n (t_i ^n - t_{i-1} ^n )x(t_{i-1}^n ), \frac{1}{T}\sum_{i=1} ^n (t_i ^n - t_{i-1} ^n ) [f(t_{i-1} ^ n) - x'(t_ {i-1} ^n) ] \big] \in A.$$ By (\ref{Div}) we deduce that $$\frac{1}{T}\sum_{i = 1} ^ n (t_i ^n - t_{i-1} ^n )x(t_{i-1}^n ) \to \frac{1}{T}\int_0 ^T x(t)dt,$$ and \begin{eqnarray} \frac{1}{T}\sum_{i=1} ^n (t_i ^n - t_{i-1} ^n ) [f(t_{i-1} ^ n) - x'(t_ {i-1} ^n) ] & \to & \frac{1}{T}\int_0 ^T [f(t) - x'(t)]dt \nonumber \\ & = &\frac{1}{T}\int_0 ^T f(t)dt - \frac{1}{T}x(t)|_0 ^T \nonumber \\ & = & \frac{1}{T}\int_0 ^T f(t)dt. \nonumber \end{eqnarray} Since $A$ is maximal monotone $\mathop{\rm Graph}(A)$ is closed and therefore $$\Big[ \frac{1}{T}\int_0 ^T x(t)dt , \frac{1}{T}\int_0 ^T f(t)dt\Big] \in A.$$ Thus $\frac{1}{T}\int_0 ^T x(t)dt \in D(A)$ and $\langle f\rangle = A( \frac{1}{T}\int_0 ^T x(t)dt)$. 
Generally the previous condition is not sufficient for the existence of periodic solutions. For example let us analyse the periodic solutions $x=(x_1,x_2) \in C^1([0,T]; \mathbb{R}^2)$ for $$\label{Example} x'(t) + Ax(t) = f(t), \; t \in [0,T],$$ where $A:\mathbb{R}^2 \to \mathbb{R}^2$ is the orthogonal rotation: $$A(x_1,x_2) = (-x_2,x_1), \; (x_1,x_2) \in \mathbb{R}^2,$$ and $f=(f_1,f_2) \in L^1(]0,T[; \; \mathbb{R} ^2)$ is $T$-periodic. For a given initial data $x(0) = x_0 \in \mathbb{R} ^2$ the solution writes $$\label{Expl} x(t) = e ^{-tA}x_0 + \int_0 ^t e ^ {-(t-s)A}f(s)ds, \quad t >0,$$ where the semigroup $e^{-tA}$ is given by $$\label{SemiGroup} e ^{-tA} = \begin{pmatrix} \cos t & \sin t\\ -\sin t & \cos t \end{pmatrix}.$$ Since $e^{-2 \pi A} = 1$ we deduce that the equation (\ref{Example}) has $2\pi$-periodic solutions if and only if $$\label{PerCond} \int_0 ^{2\pi}e ^ {tA}f(t)dt = 0.$$ Thus if $\int_0 ^{2\pi } \{ f_1(t)\cos t - f_2(t) \sin t \} dt \neq 0$ or $\int_0 ^{2\pi} \{f_1(t) \sin t + f_2(t) \cos t \}dt \neq 0$ equation (\ref{Example}) does not have any $2\pi$-periodic solution and the necessary condition still holds because $\mathop{\rm Range}(A) = \mathbb{R} ^2$. Moreover if (\ref{PerCond}) is satisfied then every solution of (\ref{Example}) is periodic and therefore uniqueness does not occur (the operator $A$ is not strictly monotone). Let us analyse now the existence. As in the one dimensional case we have \begin{prop} \label{MD_LAM} Suppose that $A:D(A)\subset H \to H$ is maximal monotone and $f \in C^1(\mathbb{R};H)$ is $T$-periodic. Then for every $\alpha > 0$ the equation $$\label{MD_AlphaEq} \alpha x(t) + x'(t) + Ax(t) = f(t), \quad t \in \mathbb{R},$$ has an unique $T$-periodic solution in $C^1(\mathbb{R}; H) \cap C(\mathbb{R}; D(A))$. \end{prop} \paragraph{Proof} Since $\alpha + A$ is strictly monotone the uniqueness follows from Proposition \ref{MD_Uniq}. 
Indeed, $$\alpha \|x- y\|^2 + (Ax - Ay, x-y) = 0, \quad x, y \in D(A),$$ implies $\alpha \|x-y\|^2 = 0$ and hence $x=y$. \\ Consider now an arbitrary initial data $x_0 \in D(A)$. By the Hille-Yosida's theorem, there is $x \in C^1([0, +\infty[; H) \cap C([0, +\infty[; D(A))$ solution for (\ref{MD_AlphaEq}). Denote by $(x_n)_{n\geq 0}$ the functions $$x_n(t) = x(nT + t), \quad t \in [0,T], \; n \geq 0.$$ We have $$\alpha x_{n+1}(t) + x_{n+1}'(t) + Ax_{n+1}(t) = f((n+1)T + t), \quad t \in [0,T],$$ and $$\alpha x_{n}(t) + x_{n}'(t) + Ax_{n}(t) = f(nT + t), \quad t \in [0,T].$$ Since $f$ is $T$-periodic, after usual computations we get \begin{eqnarray*} \alpha \|x_{n+1}(t) - x_n(t) \|^2 + \frac{1}{2}\frac{d}{dt}\|x_{n+1}(t) - x_n(t)\|^2 && \\ + ( Ax_{n+1}(t) - Ax_n(t), \; x_{n+1}(t) - x_n(t)) &=& 0, \quad t \in [0,T]. \end{eqnarray*} Taking into account that $A$ is monotone we deduce $$\|x_{n+1}(t) - x_n(t) \|\leq e ^{-\alpha t} \|x_{n+1}(0) - x_n(0)\|, \quad t \in [0,T],$$ and hence \begin{eqnarray} \|x_{n+1}(0) - x_n(0)\| & = & \|x_n(T) - x_{n-1}(T)\| \nonumber \\ & \leq & e^{-\alpha T} \|x_{n}(0) - x_{n-1}(0)\| \nonumber \\ & \leq & e ^{-2\alpha T}\|x_{n-1}(0) - x_{n-2}(0)\|\nonumber \\ & \leq & ...\nonumber \\ & \leq & e ^{-n\alpha T} \|x_1(0) - x_0(0)\|, \quad n \geq 0. \end{eqnarray} Finally we get the estimate $$\|x_{n+1}(t) - x_n(t) \|\leq e ^ {-\alpha (nT + t)}\|S_\alpha (T; 0, x_0) - x_0\|, \quad t \in [0,T], \; n\geq 0.$$ Here $S_\alpha(t; 0, x_0)$ represents the solution of (\ref{MD_AlphaEq}) for the initial data $x_0$. 
From the previous estimate it is clear that $(x_n)_{n\geq 0}$ is convergent in $C^0 ([0,T]; H)$: $$x_n(t) = x_0(t) + \sum_{k=0} ^{n-1}(x_{k+1}(t) - x_k(t)), \quad t \in [0,T],$$ where \begin{eqnarray} \big\|\sum_{k=0} ^{n-1}( x_{k+1}(t) - x_k(t) )\big\| & \leq & \sum_{k=0} ^{n-1} \|x_{k+1}(t) - x_k(t)\| \nonumber \\ & \leq & \sum_{k=0} ^{n-1}e ^{-\alpha (kT+t)}\|S_\alpha(T; 0,x_0) - x_0\| \nonumber \\ & \leq & \frac {e^{-\alpha t }}{1 - e ^ {-\alpha T}}\|S_\alpha (T; 0,x_0) - x_0\|. \nonumber \end{eqnarray} Moreover $\|x_n(t)\| \leq \|S_\alpha (t; 0,x_0)\| + \frac {1}{1-e ^ {-\alpha T}}\|S_\alpha (T; 0 , x_0) - x_0\|$. Denote by $x_\alpha$ the limit of $(x_n)_{n\geq 0}$ as $n\to \infty$. We should note that without any other hypothesis $(x_\alpha)_{\alpha > 0}$ is not uniformly bounded in $L^\infty(]0,T[;H)$. We have only estimate in $\mathcal{O}( 1 + \frac{1}{\alpha})$, $$\|x_\alpha\|_{L^\infty ([0,T]; H)} \leq C\big(1 + \frac{1}{1- e ^{-\alpha T}}\big)\sim \mathcal{O} \big(1 + \frac{1}{\alpha}\big).$$ The above estimate leads immediately to the following statement. \begin{remark} \label{AlphaSol} The sequence $(\alpha x_\alpha)_{\alpha > 0}$ is uniformly bounded in $L^\infty ([0,T]; H)$. \end{remark} Let us demonstrate that $x_\alpha$ is $T$-periodic and solution for (\ref{MD_AlphaEq}). Indeed, $$x_\alpha (0) = \lim_{n \to \infty} x_n(0) = \lim_{n \to \infty} x_{n-1}(T) = x_\alpha (T).$$ Now let us show that $(x_n')_{n\geq 0}$ is also uniformly bounded in $L^\infty(]0,T[;H)$. 
By taking the difference between the equations (\ref{MD_AlphaEq}) at the moments $t$ and $t+h$ we have $$\alpha (x (t+h) - x(t)) + x'(t+h) - x'(t) + Ax(t+h) - Ax(t) = f(t+h) - f(t), \quad t < t+h.$$ Multiplying by $x(t+h) - x(t)$ we obtain $$\alpha \|x(t+h) - x(t)\|^2 + \frac{1}{2}\frac{d}{dt}\|x(t+h) - x(t)\|^2 \leq \|f(t+h) - f(t) \| \cdot \|x(t+h) - x(t)\|,$$ which can be also rewritten as \begin{eqnarray} \frac{1}{2}{e ^{2\alpha t}\|x(t+h) - x(t)\|^2} & \leq& \int_0 ^t e^{\alpha s}\|f(s+h) - f(s)\| \cdot e ^{\alpha s}\|x(s+h) - x(s)\|ds \nonumber \\ &&+ \frac{1}{2}\|x(h) - x(0)\|^2, \quad t < t+h.\nonumber \end{eqnarray} By using Bellman's lemma we conclude that \begin{eqnarray} \frac{1}{h}\|x(t+h) - x(t)\| & \leq & \int_0 ^t e ^ {-\alpha (t-s)}\frac{1}{h} \| f(s+h) - f(s) \|ds\nonumber \\ &&+ e ^{-\alpha t} \frac{1}{h}\|x(h) - x(0)\| , \quad 0\leq t < t+h. \end{eqnarray} By passing to the limit for $h\to 0$ the previous formula yields \begin{eqnarray} \|x'(t)\| & \leq & e ^ {-\alpha t} \|x'(0)\|+ \int_0 ^t e ^ {-\alpha (t-s)}\|f'(s)\|ds \nonumber \\ & \leq & e ^{-\alpha t } \|f(0) - \alpha x_0 - Ax_0\| + \frac{1}{\alpha}( 1 - e ^ {-\alpha t} )\|f'\|_{L^\infty(]0,T[;H)} \nonumber \\ & \leq & \|f(0) - \alpha x_0 - Ax_0\|+ \frac{1}{\alpha}\|f'\|_{L^\infty(]0,T[;H)} < +\infty. \nonumber \end{eqnarray} Therefore $(x_n')_{n\geq 0}$ is uniformly bounded in $L^\infty(]0,T[;H)$ since $$\|x_n'\|_{L^\infty(]0,T[;H)} =\|x'(nT + (\cdot))\|_{L^\infty(]0,T[;H)} \leq \|x'\|_{L ^\infty ([0,+\infty[; H)},$$ and thus we have $x_n'(t) \rightharpoonup y_\alpha (t)$, $t \in [0,T]$. 
We can write $$(x_n(t), z) = (x_n(0), z) + \int_0 ^t (x_n'(s) ,z)ds, \quad z \in H,\; t \in [0,T], \; n\geq 0,$$ and by passing to the limit for $n \to \infty$ we deduce $$(x_\alpha (t), z) = (x_\alpha (0) , z) + \int_0 ^t (y_\alpha (s), z)ds, \quad z \in H,\; t \in [0,T],$$ which is equivalent to $$x_\alpha(t) = x_\alpha(0) + \int_0 ^t y_\alpha(s)ds, \quad t \in [0,T].$$ Therefore $x_\alpha$ is differentiable and $x_\alpha ' = y_\alpha$. Finally we can write $x_n'(t) \rightharpoonup x_\alpha '(t)$, $t \in [0,T]$. Let us show that $x_\alpha$ is also a solution for (\ref{MD_AlphaEq}). We have $$[x_n(t), f(t) - \alpha x_n(t) - x_n'(t)]\in A, \quad n \geq 0, \; t \in [0,T].$$ Since $x_n(t) \to x_\alpha (t)$, $x_n'(t) \rightharpoonup x_\alpha '(t)$ and $A$ is maximal monotone we conclude that $$[x_\alpha (t), f(t) - x_\alpha '(t)] \in A, \quad t \in [0,T], \; \alpha >0,$$ which means that $x_\alpha (t) \in D(A)$ and $Ax_\alpha(t) = f(t) - x_\alpha '(t), \quad t \in [0,T]$.\\ \\ Now we establish for the linear case the similar result stated in Proposition \ref{UnifBound}. Before that, let us recall a standard result concerning maximal monotone operators on Hilbert spaces. \begin{prop}\label{Comp} Assume that $A$ is a maximal monotone operator (linear or not) and $\alpha u_\alpha + Au_\alpha = f$, $u_\alpha \in D(A)$, $f \in H$, $\alpha >0$. Then the following statements are equivalent:\\ (i) $f \in \mathop{\rm Range}(A)$;\\ (ii) $(u_\alpha)_{\alpha >0}$ is bounded in $H$. Moreover, in this case $(u_\alpha)_{\alpha >0}$ is convergent in $H$ to the element of minimal norm in $A^{-1}f$. \end{prop} \paragraph{Proof} {\it (i) $\to$ (ii)} By the hypothesis there is $u \in D(A)$ such that $f = Au$. 
After multiplication by $u_\alpha - u$ we get $$\alpha (u_\alpha, u_\alpha -u) + (Au_\alpha - Au, u_\alpha -u) = 0, \quad \alpha >0.$$ Taking into account that $A$ is monotone we deduce $$\|u_\alpha \|^2 \leq (u_\alpha, u) \leq \|u_\alpha \| \cdot \|u \|, \quad \alpha >0,$$ and hence $\|u_\alpha\| \leq \|u\|$, $\alpha >0$, $u \in A^{-1}f$ which implies that $u_\alpha \rightharpoonup u_0$. We have $[u_\alpha, f - \alpha u_\alpha] \in A$, $\alpha >0$ and since $A$ is maximal monotone, by passing to the limit for $\alpha \to 0$ we deduce that $[u_0,f]\in A$, or $u_0 \in A^{-1}f$. Moreover $$\|u_0\|= \|w-\lim_{\alpha \to 0} u_\alpha \|\leq \liminf_{\alpha \to 0}\|u_\alpha \| \leq \limsup_{\alpha \to 0} \|u_\alpha\|\leq \|u\|, \quad \forall u \in A ^{-1}f.$$ In particular, taking $u = u_0 \in A^{-1}f$ we get $$\|w-\lim_{\alpha \to 0} u_\alpha\|= \lim_{\alpha \to 0} \|u_\alpha\|,$$ and hence, since any Hilbert space is strictly convex, by Mazur's theorem we deduce that the convergence is strong $$u_\alpha \to u_0 \in A^{-1}f, \quad \alpha \to 0,$$ where $\|u_0\| = \inf_{u \in A^{-1}f} \|u\| = \min_{u \in A ^{-1}f}\|u\|$.\\ {\it (ii) $\to$ (i)} Conversely, suppose that $(u_\alpha)_{\alpha >0}$ is bounded in $H$. Therefore $u_\alpha \rightharpoonup u$ in $H$. We have $[u_\alpha , f -\alpha u_\alpha ] \in A$, $\alpha >0$ and since $A$ is maximal monotone by passing to the limit for $\alpha \to 0$ we deduce that $[u,f]\in A$ or $u \in D(A)$ and $f = Au$. \begin{theorem} \label{MD_LinNecAndSuf} Assume that $A:D(A)\subset H \to H$ is a linear maximal monotone operator on a compact Hilbert space $H$ and $f\in C^1(\mathbb{R};H)$ is a $T$-periodic function. Then the following statements are equivalent: \\ (i) equation (\ref{MD}) has periodic solutions; \\ (ii) the sequence of periodic solutions for (\ref{MD_AlphaEq}) is bounded in $C^1(\mathbb{R};H)$. 
Moreover in this case $(x_\alpha)_{\alpha > 0}$ is convergent in $C^0(\mathbb{R};H)$ and the limit is also a $T$-periodic solution for (\ref{MD}). \end{theorem} \paragraph{Proof} {\it (i) $\to$ (ii)} Denote by $x, x_\alpha$ the periodic solutions for (\ref{MD}) and (\ref{MD_AlphaEq}). By taking the difference and after multiplication by $x_\alpha (t) - x(t)$ we get: $$\label{Diff} \alpha \|x_\alpha (t)- x(t)\|^2 + \frac{1}{2}\frac{d}{dt}\|x_\alpha (t) - x(t)\|^2 \leq \alpha \|x(t)\| \cdot \|x_\alpha (t) - x(t)\|, \quad t \in \mathbb{R}.$$ Finally, after integration and by using Bellman's lemma, formula (\ref{Diff}) yields \begin{eqnarray} \|x_\alpha (t) - x(t)\|& \leq & e ^{-\alpha t} \|x_\alpha (0) - x(0) \| + \int_0 ^t \alpha e ^ {-\alpha (t-s)} \|x(s)\|ds \nonumber \\ & \leq & e ^ {-\alpha t} \|x_\alpha (0) - x(0)\| + (1 - e ^ {-\alpha t}) \|x\|_{L^\infty},\quad t \in \mathbb{R}.\nonumber \end{eqnarray} Since $x_\alpha$ and $x$ are $T$-periodic we can also write \begin{eqnarray} \|x_\alpha (t) - x(t) \| & = & \|x_\alpha (nT + t) - x(nT + t) \| \nonumber \\ & \leq & e ^ {-\alpha (nT + t) }\|x_\alpha (0) - x(0)\| + (1 - e ^ {- \alpha (nT + t)} )\|x\|_{L^{\infty}}. 
\nonumber \end{eqnarray} By passing to the limit for $n \to \infty$ we obtain $$\|x_\alpha - x\|_{L^{\infty}} \leq \|x\|_{L^{\infty}},\quad \alpha >0,$$ and hence $$\|x_\alpha\|_{L^{\infty}} \leq 2\|x\|_{L^{\infty}}, \quad \alpha >0.$$ Since $A$ is linear we can write \begin{eqnarray*} \lefteqn{\frac{\alpha}{h} (x_\alpha (t+h)-x_\alpha (t)) + \frac {1}{h}(x_\alpha '(t+h) -x_\alpha'(t)) + \frac{1}{h}A(x_\alpha (t+h) -x_\alpha(t))}\\ & = & \frac{1}{h}(f(t+h) -f(t)),\quad t < t+h, \; \alpha >0,\hspace{4cm} \end{eqnarray*} and for $t < t+h$, $$\frac{1}{h}(x'(t+h) - x'(t))+\frac{1}{h} A(x(t+h)- x(t)) = \frac{1}{h}(f(t+h) - f(t)).$$ For every $h>0$ denote by $y_{\alpha,h}$, $y_h$ and $g_h$ the periodic functions: \begin{gather*} y_{\alpha, h}(t) = \frac{1}{h}(x_\alpha(t+h) - x_\alpha (t)), \quad t \in \mathbb{R},\; \alpha >0, \\ y_h(t) = \frac{1}{h}(x(t+h) - x(t)), \quad t \in \mathbb{R}, \\ g_h(t) = \frac{1}{h}(f(t+h) - f(t)), \quad t \in \mathbb{R}, \end{gather*} and hence we have \begin{gather*} \alpha y_{\alpha, h}(t) + y_{\alpha , h}'(t) + Ay_{\alpha , h}(t) = g_h(t), \quad t \in \mathbb{R}, \\ y_h'(t) + Ay_h(t) = g_h(t), \quad t \in \mathbb{R}. 
\end{gather*} By the same computations we get $$\|y_{\alpha,h} (t) - y_h(t)\| \leq e ^{-\alpha t} \|y_{\alpha ,h}(0) - y_h(0) \| + \int_0 ^t \alpha e ^ {-\alpha (t-s)} \|y_h(s)\|ds.$$ Now by passing to the limit for $h\to 0$ we deduce \begin{eqnarray} \|x_\alpha '(t) - x'(t)\|& \leq & e ^{-\alpha t} \|x_\alpha '(0) - x'(0) \| + \int_0 ^t \alpha e ^ {-\alpha (t-s)} \|x'(s)\|ds \nonumber \\ & \leq & e ^ {-\alpha t} \|x_\alpha '(0) - x'(0)\| + (1 - e ^ {-\alpha t}) \|x'\|_{L^\infty},\quad t \in [0,T].\nonumber \end{eqnarray} By the periodicity we obtain as before that \begin{eqnarray} \|x_\alpha '(t) - x '(t) \| & = & \|x_\alpha '(nT + t) - x'(nT + t) \| \nonumber \\ & \leq & e ^ {-\alpha (nT + t) }\|x_\alpha '(0) - x'(0)\| + (1 - e ^ {- \alpha (nT + t)} )\|x'\|_{L^{\infty}}, \nonumber \end{eqnarray} and hence by passing to the limit for $n \to \infty$ we conclude that $$\|x_\alpha' - x'\|_{L^{\infty}} \leq \|x'\|_{L^{\infty}},\quad \alpha >0.$$ Therefore, $(x_\alpha ')_{\alpha >0}$ is also uniformly bounded in $L^{\infty}$ $$\|x_\alpha '\|_{L^{\infty}} \leq 2\|x'\|_{L^{\infty}}, \quad \alpha > 0.$$ Conversely, the implication {\it (ii) $\to$ (i)} follows by using Arzela-Ascoli's theorem and by passing to the limit for $\alpha \to 0$ in (\ref{MD_AlphaEq}). Let us continue the analysis of the previous example. 
The semigroup associated to the equation (\ref{MD_AlphaEq}) is given by $$e^{-t(\alpha + A)} = e ^ {-\alpha t } e ^ {-tA} =e ^ {-\alpha t } \begin{pmatrix} \cos t & \sin t\\ -\sin t & \cos t \end{pmatrix}, \quad t \in \mathbb{R},\; \alpha > 0,$$ and the periodic solution for equation (\ref{MD_AlphaEq}) reads \begin{eqnarray} x_\alpha (t) & = & (1- e ^ {-T(\alpha + A)})^{-1}\int_0 ^T e ^ {-(T-s)(\alpha + A)}f(s)ds\nonumber \\ &&+ \int_0 ^t e ^ {-(t-s)(\alpha + A)}f(s)ds\nonumber \\ & = & \frac{1 - e ^ {-T(\alpha - A)}}{( 1-e ^ {-\alpha T}\cos T)^2 + (e ^ {-\alpha T}\sin T)^2} \int_0 ^T e ^ {-(T-s)(\alpha + A)}f(s)ds \nonumber \\ &&+ \int_0 ^t e ^ {-(t-s)(\alpha + A)} f(s)ds, \quad t >0, \; \alpha >0.\nonumber \end{eqnarray} As we have seen, proving the existence of periodic solutions reduces to finding uniform $L^\infty(]0,T[;H)$ estimates for $(x_\alpha)_{\alpha >0}$ and $(x_\alpha ')_{\alpha >0}$ . Since $A$ is a linear bounded operator ($\|A\| _{\mathcal{L}(H; H)}= 1$) we have \begin{eqnarray} \|x_\alpha '\|_{L^\infty(]0,T[;H)} & = & \|f - \alpha x_\alpha - Ax_\alpha \|_{L^\infty(]0,T[;H)} \nonumber \\ & \leq & \|f\|_{L^\infty(]0,T[;H)} + (\alpha + \|A\|_{\mathcal{L}(H; H)})\|x_\alpha \|_{L^\infty(]0,T[;H)}, \quad \alpha >0,\nonumber \end{eqnarray} and hence in this case it is sufficient to find only uniform $L^\infty(]0,T[;H)$ estimates for $(x_\alpha )_{\alpha >0}$ or uniform estimates for $(x_\alpha (0))_{\alpha > 0}$ in $H$.\\ {\bf Case 1:} $T = 2n\pi$, $n \geq 0$. We have $$\lim_{\alpha \to 0}x_\alpha (0) = \lim_{\alpha \to 0} \frac{1}{1 - e ^ {-\alpha T}} \int_0 ^T e ^ {- (T-s)(\alpha + A)} f(s)ds.$$ If $\int_0 ^T e ^ {-(T-s)A}f(s)ds \neq 0$ , then $(x_\alpha(0))_{\alpha > 0}$ is not bounded. In fact since $e ^ {-2n\pi A} = 1$ it is easy to check that equation (\ref{Example}) does not have any periodic solution. 
If $\int_0 ^T e ^ {-(T-s)A}f(s)ds = 0$ then every solution of (\ref{Example}) is $T$-periodic and $(x_\alpha(0))_{\alpha >0}$ is convergent for $\alpha \to 0$: \begin{eqnarray} \lim_{\alpha \to 0} x_\alpha(0) & = & \lim_{\alpha \to 0} \frac{\int_0 ^T (e ^ {-\alpha (T-s)} -1 ) e ^ {-(T-s)A} f(s) ds}{1 - e ^ {-\alpha T}}\nonumber \\ & = & - \int_0 ^T \frac {T-s}{T} e ^ {-(T-s)A} f(s) ds \nonumber \\ & = & \frac {1}{T}\int_0 ^T s e ^ {-(T-s)A} f(s) ds \nonumber . \end{eqnarray} {\bf Case 2:} $T \neq 2n\pi$ for all $n \geq 0$. In this case $(1 - e ^ {-TA})$ is invertible and $(x_\alpha (0))_{\alpha >0}$ converges to $x(0)$ where $x$ is the unique $T$-periodic solution of (\ref{Example}): \begin{eqnarray} \lim_{\alpha \to 0 } x_\alpha (0) & = & \lim_{\alpha \to 0} (1 - e ^ {-T(\alpha + A)})^{-1} \int_0 ^T e ^ { - (T-s)(\alpha + A)} f(s) ds \nonumber \\ & = & (1 - e ^ {-TA})^{-1} \int_0 ^T e ^ { - (T-s)A} f(s) ds\nonumber \\ & = & \frac{1}{2 \sin (\frac {T}{2})}\int_0 ^T e ^ {-(\frac{T+\pi }{2} - s)A} f(s) ds. \nonumber \end{eqnarray} We state now our main result of existence in the linear and symmetric case. \begin{theorem} \label{MD_NecAndSufLinSym} Assume that $A:D(A)\subset H \to H$ is a linear maximal monotone and symmetric operator and $f\in C^1([0,T];H)$ is a $T$-periodic function. Then the necessary and sufficient condition for the existence of periodic solutions for (\ref{MD}) is given by $$\langle f\rangle:=\frac{1}{T}\int_0 ^T f(t)dt \in \mathop{\rm Range}(A).$$ In this case we have the estimates: $$\|x\|_{L^\infty(]0,T[;H)} \leq \|A^{-1}\langle f\rangle\| + \frac{\sqrt T}{2} \|f\|_{L^2(]0,T[;H)} + \frac{T}{2} \|f'\|_{L^1(]0,T[;H)},$$ and $$\|x'\|_{L^\infty(]0,T[;H)} \leq \frac {1}{\sqrt T}\|f\|_{L^2(]0,T[;H)} + \|f'\|_{L^1(]0,T[;H)},$$ and the solution is unique up to a constant in $A^{-1}(0)$. \end{theorem} \paragraph{Proof} The condition is necessary (see Proposition \ref{MD_NecCond}). Let us show now that it is also sufficient. 
Consider the $T$-periodic solutions $(x_\alpha)_{\alpha > 0}$ for $$\alpha x_\alpha (t) + x_\alpha '(t) + Ax_\alpha (t) = f(t), \quad t \in [0,T], \; \alpha >0.$$ First we prove that $(x_\alpha )_{\alpha > 0}$ is uniformly bounded in $C^1([0,T];H)$. Let us multiply by $x_\alpha '(t)$ and integrate on a period: $$\int_0 ^T \|x_\alpha '(t)\|^2dt + \int_0 ^T \alpha (x_\alpha (t), x_\alpha '(t)) + (Ax_\alpha (t), x_\alpha '(t))dt = \int_0 ^T (f(t),x_\alpha '(t))dt.$$ Since $A$ is symmetric and $x_\alpha$ is $T$-periodic we have \begin{eqnarray*} \lefteqn{\int_0 ^T \alpha (x_\alpha (t), x_\alpha '(t)) + (Ax_\alpha (t), x_\alpha '(t))dt }\\ &= & \int_0 ^T \frac{\alpha }{2}\frac {d}{dt} \|x_\alpha (t)\|^2 dt + \int_0 ^T \frac{1}{2}\frac{d}{dt}(Ax_\alpha (t),x_\alpha (t)) dt \\ & = & \frac{1}{2}\left \{\alpha \|x_\alpha (t)\|^2 + (Ax_\alpha (t),x_\alpha (t)) \right \}|_0 ^T = 0. \end{eqnarray*} Finally we get $$\|x_\alpha ' \|_{L^2(]0,T[;H)} ^2 \leq (f, x_\alpha ')_{L^2(]0,T[;H)} \leq \|f\|_{L^2(]0,T[;H)}\cdot \|x_\alpha '\|_{L^2(]0,T[;H)},$$ and hence $$\|x_\alpha '\|_{L^2(]0,T[;H)} \leq \|f\|_{L^2(]0,T[;H)}, \quad \alpha >0.$$ Therefore we can write $$\label{EstDer1} \min_{t \in [0,T]} \|x_\alpha '(t)\|\leq \frac {1}{\sqrt T}\|x_\alpha '\|_{L^2(]0,T[;H)} \leq \frac {1}{\sqrt T}\|f\|_{L^2(]0,T[;H)}.$$ As seen before, since $A$ is linear we can write \begin{eqnarray*} \frac{\alpha }{h}(x_\alpha (t+h) -x_\alpha (t)) + \frac{1}{h}(x_\alpha '(t+h)-x_\alpha '(t)) &&\\ + \frac{1}{h}A(x_\alpha (t+h) - x_\alpha (t))&=& \frac{1}{h}(f(t+h)- f(t)), \end{eqnarray*} and by standard calculations for $s < t$ and $h >0$, we get \begin{eqnarray*} \lefteqn{\frac{1}{h}\|x_\alpha (t+h) - x_\alpha (t)\|} \\ & \leq & e ^ {-\alpha (t-s)}\frac{1}{h}\|x_\alpha (s+ h)- x_\alpha (s)\| + \int_s ^t e ^ {-\alpha (t - \tau)}\frac{1}{h}\|f(\tau + h) - f(t)\|d\tau\,. 
\end{eqnarray*} Passing to the limit for $h \to 0$ we deduce \begin{eqnarray} \label{EstDer2} \|x_\alpha '(t)\| &\leq & e ^ {-\alpha (t-s)}\|x_\alpha '(s) \|+ \int_s ^t e ^ {-\alpha (t - \tau)}\|f'(\tau)\|d\tau \nonumber \\ & \leq & \|x_\alpha '(s)\| + \int_s ^t \|f'(\tau)\|d\tau, \quad s \leq t, \; \alpha >0. \end{eqnarray} From (\ref{EstDer1}) and (\ref{EstDer2}) we conclude that the functions $(x_\alpha ')_{\alpha >0}$ are uniformly bounded in $L^\infty(]0,T[;H)$: $$\|x_\alpha '\|_{L^\infty(]0,T[;H)} \leq \frac {1}{\sqrt T}\|f\|_{L^2(]0,T[;H)} + \|f'\|_{L^1(]0,T[;H)}, \quad \alpha >0.$$ As shown before, since $A$ is linear and $x_\alpha$ is $T$-periodic we have also $$\label{Average1} \alpha \langle x_\alpha\rangle + A\langle x_\alpha\rangle = \langle f\rangle.$$ By the hypothesis there is $x_0\in D(A)$ such that $\langle f\rangle= Ax_0$ and hence $$\|\langle x_\alpha\rangle \| = \| (\alpha + A)^{-1}\langle f\rangle\| = \|(\alpha + A)^{-1}Ax_0 \|\leq \|x_0\|, \quad \alpha > 0.$$ Now it is easy to check that $(x_\alpha)_{\alpha > 0}$ is uniformly bounded in $L^\infty(]0,T[;H)$: \begin{eqnarray*} \|x_\alpha(t) - \langle x_\alpha\rangle \| & = & \Big\|\frac{1}{T}\int_0 ^T (x_\alpha (t) - x_\alpha (s))ds \Big\| \\ & = & \Big\| \frac{1}{T}\int_0 ^T \int_s ^t x_\alpha '(\tau) d \tau ds \Big\| \\ & \leq & \frac{\sqrt T }{2}\|f\|_{L^2(]0,T[;H)} + \frac{T}{2} \|f'\|_{L^1(]0,T[;H)},\nonumber \end{eqnarray*} and thus \begin{eqnarray} \|x_\alpha\|_{L^\infty(]0,T[;H)} & \leq & \|\langle x_\alpha\rangle \| + \frac{\sqrt T}{2} \|f\|_{L^2(]0,T[;H)} + \frac{T}{2} \|f'\|_{L^1(]0,T[;H)} \nonumber \\ & \leq & \|x_0\| + \frac{\sqrt T}{2} \|f\|_{L^2(]0,T[;H)} + \frac{T}{2} \|f'\|_{L^1(]0,T[;H)}. \nonumber \end{eqnarray} Now we can prove that $(x_\alpha)_{\alpha >0}$ is convergent in $C^1([0,T];H)$. 
Indeed, by taking the difference between the equations (\ref{MD_AlphaEq}) written for $\alpha, \beta > 0$, after multiplication by $x_\alpha '(t) - x_\beta '(t)$ and integration on $[0,T]$ we get \begin{eqnarray*} \lefteqn{\int_0 ^T \{ \alpha (x_\alpha (t) - x_\beta (t), x_\alpha '(t) - x_\beta '(t)) }\\ \lefteqn{+ \|x_\alpha'(t) - x_\beta '(t) \|^2 + ( A(x_\alpha (t) - x_\beta (t)), x_\alpha '(t) - x_\beta '(t)) \} dt }\\ & = & - (\alpha - \beta ) \int_0 ^T ( x_\beta (t), x_\alpha '(t) - x_\beta '(t)) dt. \end{eqnarray*} Since $A$ is symmetric, $x_\alpha$ and $x_\beta$ are $T$-periodic and uniformly bounded in $L^\infty(]0,T[;H)$ we deduce that $$\|x_\alpha ' - x_\beta ' \|_{L^2(]0,T[;H)} \leq |\alpha - \beta |\cdot \sup _{\gamma > 0} \|x_\gamma \|_{L^2(]0,T[;H)},$$ or $$\|x_\alpha ' - x_\beta '\|_{L^\infty(]0,T[;H)} \leq \frac {|\alpha - \beta |}{\sqrt T}\cdot \sup_{\gamma >0} \|x_\gamma \|_{L^2(]0,T[;H)} + |\alpha - \beta | \cdot \sup_{\gamma >0} \|x_\gamma ' \|_{L^1(]0,T[;H)},$$ and therefore $(x_\alpha ')_{\alpha >0}$ converges in $C([0,T];H)$. We already know that $(\langle x_\alpha\rangle )_{\alpha >0} = ((\alpha + A)^{-1}\langle f\rangle)_{\alpha >0}$ is bounded in $H$ and by Proposition \ref{Comp} it follows that $(\langle x_\alpha\rangle )_{\alpha >0}$ is convergent to the element of minimal norm in $A^{-1}\langle f\rangle$. We have $$x_\alpha (t) = x_\alpha (0) + \int_0 ^t x_\alpha '(s) ds, \quad t \in \mathbb{R}, \alpha >0.$$ By taking the average we deduce that $x_\alpha (0) = \langle x_\alpha\rangle - \big\langle \int_0 ^t x_\alpha '(s)ds \big\rangle$ and therefore, since $(x_\alpha ')_{\alpha >0}$ is uniformly convergent, it follows that $(x_\alpha(0))_{\alpha >0}$ is also convergent. Finally we conclude that $(x_\alpha)_{\alpha >0}$ is convergent in $C^1([0,T];H)$ to the periodic solution $x$ for (\ref{MD}) such that $\langle x\rangle$ is the element of minimal norm in $A^{-1}\langle f\rangle$. 
Before analyzing the periodic solution for the heat equation, following an idea of \cite{Har}, let us state the following proposition. \begin{prop} \label{asympt} Assume that A:D(A)\subset H \to H is a linear maximal monotone and symmetric operator and f\in C^1([0,T];H) is a T-periodic function. Then for every x_0 \in D(A) we have $$\label{dist} \lim_{t \to \infty } \frac{1}{T}(x(t+ T;0,x_0) - x(t;0,x_0)) = \langle f\rangle - \projf,$$ where x(\cdot; 0, x_0) represents the solution of (\ref{MD}) with the initial data x_0 and R(A) is the range of A. \end{prop} \begin{remark} A being maximal monotone, A^{-1} is also maximal monotone and therefore \overline{D(A^{-1})} = \overline{R(A)} is convex. \end{remark} \paragraph{Proof of Proposition \ref{asympt}.} Consider x_0\in D(A) and denote by x(\cdot) the corresponding solution. By integration on [t,t + T] we get $$\label{aveasy} \frac{1}{T}(x(t+T) - x(t)) + A\Big(\frac{1}{T}\int_t ^{t + T } x(s)ds\Big) = \langle f\rangle.$$ For each \alpha > 0 consider x_\alpha \in D(A) such that \alpha x_\alpha + Ax_\alpha = \langle f\rangle. Denoting by y(\cdot) the function y(t) = \frac{1}{T}\int_t ^{t+T} x(s)ds, \quad t \geq 0, equation (\ref{aveasy}) reads$$ y'(t) + Ay(t) = \alpha x_\alpha + Ax_\alpha, \quad t \geq 0, \; \alpha > 0. $$Let us search for y of the form y_1 + y_2 where$$ y_1'(t) + Ay_1(t) = \alpha x_\alpha, \quad t \geq 0, $$with the initial condition y_1(0) = 0 and $$\label{y2} y_2'(t) + Ay_2(t) = Ax_\alpha, \quad t \geq 0,$$ with the initial condition y_2(0) = y(0) = \frac{1}{T}\int_0 ^T x(t)dt. We are interested in the asymptotic behaviour of Ay(t) = Ay_1(t) + Ay_2(t) for large t. 
We have \begin{eqnarray} y_1(t) & = & e^{-tA}y_1(0) + \int_0 ^t e^{-(t-s)A}\alpha x_\alpha ds\nonumber \\ & = & \int_0 ^t e^{-(t-s)A}\alpha x_\alpha ds, \nonumber \end{eqnarray} and therefore, $Ay_1(t) = \int_0 ^t Ae^{-(t-s)A}\alpha x_\alpha ds = e^{-(t-s)A}\alpha x_\alpha \Big|_0 ^t = (1 - e^{-tA}) \alpha x_\alpha .$ On the other hand, after multiplication of (\ref{y2}) by y_2'(t) = (y_2(t) - x_\alpha)' we get$$ \|y_2'(t)\|^2 + (A(y_2(t) - x_\alpha), (y_2(t)-x_\alpha)') = 0, \quad t \geq 0. $$Since A is symmetric, after integration on [0,t] we obtain $\int_0 ^t \|y_2'(s)\|^2 ds + \frac{1}{2}( A(y_2(t) - x_\alpha), y_2(t) - x_\alpha) = \frac{1}{2}( A(y_2(0) - x_\alpha), y_2(0) - x_\alpha),$ and therefore, by the monotonicity of A it follows that$$ \int_0 ^ \infty \|y_2'(t)\|^2 dt \leq \frac{1}{2} (A(y_2(0) - x_\alpha ) , y_2(0) - x_\alpha ). $$Thus \lim_{t \to \infty } y_2'(t) = 0 and by passing to the limit in (\ref{y2}) we deduce that \lim_{t \to \infty} Ay_2(t) = \lim_{ t \to \infty } ( Ax_\alpha - y_2'(t)) = Ax_\alpha. Finally we find that \begin{eqnarray} \label{sconv} \lefteqn{\lim_{t\to\infty} \big\{ \frac{1}{T}( x(t + T ) - x(t) ) - e ^ {- t A} \alpha x_\alpha \big\} }\nonumber\\ & = & \lim_{t\to\infty} \{ y'(t) - e ^ { - t A } \alpha x_\alpha \} \nonumber \\ & = & \lim_{t\to\infty} \{ \langle f\rangle - Ay(t) - e ^ { - t A} \alpha x_\alpha \} \nonumber \\ & = & \lim_{t\to\infty} \{ \langle f\rangle - Ay_1(t) - Ay_2(t) - e ^ {- t A} \alpha x_\alpha \} \nonumber \\ & = & \langle f\rangle - \alpha x_\alpha - A x_\alpha = 0, \quad \alpha > 0. \end{eqnarray} Now let us put y_\alpha = A x_\alpha and observe that  y _\alpha + \alpha A ^ {-1} y_\alpha = Ax_\alpha + \alpha x_\alpha = \langle f\rangle, \quad \alpha > 0. 
Therefore, \begin{eqnarray} \lim_{\alpha \searrow 0} y_\alpha & = & \lim_{\alpha \searrow 0} ( 1 + \alpha A ^{-1}) ^ {-1} \langle f\rangle \nonumber \\ & = & \lim_{\alpha \searrow 0} J_\alpha ^ {A^{-1}} \langle f\rangle \nonumber \\ & = & \mathop{\rm Proj}\nolimits_{ \overline { D(A^{-1})}} \langle f\rangle \nonumber \\ & = & \projf, \nonumber \end{eqnarray} and it follows that $\lim_{\alpha \searrow 0} \alpha x_\alpha = \lim_{\alpha \searrow 0} ( \langle f\rangle - Ax_\alpha) = \lim_{\alpha \searrow 0} ( \langle f\rangle - y_\alpha) = \langle f\rangle - \projf.$ Since \mathop{\rm Graph}(A) is closed and [\alpha x_\alpha, \alpha y_\alpha] = [\alpha x_\alpha, A(\alpha x_\alpha)] \in A, \alpha > 0, by passing to the limit for \alpha \searrow 0 we deduce that \langle f\rangle - \projf \in D(A) and A(\langle f\rangle - \projf ) = 0. It is easy to see that we can pass to the limit for \alpha \searrow 0 in (\ref{sconv}). Indeed, for \varepsilon > 0 let us consider \alpha _{\varepsilon} > 0 such that \|\lim_{\alpha \searrow 0} \alpha x_\alpha - \alpha _\varepsilon x_{\alpha_{\varepsilon }} \| < \frac{\varepsilon}{2}. 
We have \begin{eqnarray*} \lefteqn{\big \| \frac{1}{T} ( x(t + T) - x(t) ) - e ^ {-t A} \lim_{\alpha \searrow 0} \alpha x_\alpha \big \| }\\ & \leq & \big\| \frac{1}{T} ( x(t + T) - x(t) ) - e ^ {-t A} \alpha_\varepsilon x_{ \alpha_\varepsilon }\big \| + \big \| e ^ {-t A} \alpha_\varepsilon x_{ \alpha_\varepsilon } - e ^ {- t A} \lim_{\alpha \searrow 0} \alpha x_\alpha \big \| \\ & \leq & \big \| \frac{1}{T} ( x(t + T) - x(t) ) - e ^ {-t A} \alpha_\varepsilon x_{\alpha_\varepsilon } \big \| + \| \alpha_\varepsilon x_{ \alpha_\varepsilon } - \lim_{\alpha \searrow 0} \alpha x_\alpha \| \\ & \leq & \frac{\varepsilon}{2} + \frac{\varepsilon}{2} = \varepsilon, \quad t \geq t(\alpha_\varepsilon, \frac{\varepsilon}{2}) = t (\varepsilon), \end{eqnarray*} and thus$$ \lim_{t\to\infty} \{ \frac{1}{T} ( x(t+ T ) - x(t) ) - e ^ {- t A} ( \langle f\rangle - \projf) \} = 0. $$But  e ^ {- t A} ( \langle f\rangle - \projf )  does not depend on t \geq 0: \begin{eqnarray*} \frac{d}{dt} e ^ {-t A} ( \langle f\rangle - \projf ) & = & - A e ^ { - t A } ( \langle f\rangle - \projf ) \nonumber \\ & = & - e ^ { - t A } A ( \langle f\rangle - \projf ) = 0, \end{eqnarray*} and thus the previous formula reads$$ \lim_{t\to\infty} \frac{1}{T} ( x( t + T ) - x(t) ) = \langle f\rangle - \projf. $$\begin{remark} Under the same hypothesis as above we can easily check that$$ \inf_{ x_0 \in D(A)} \frac{\|x(T; 0, x_0) - x_0\|}{T} = \|\langle f\rangle - \projf \| = \mathop{\rm dist}(\langle f\rangle, \overline{R(A)}). $$\end{remark} \subsection{Periodic solutions for the heat equation} Let \Omega \subset \mathbb{R} ^d, d \geq 1, be an open bounded set with \partial \Omega \in C^2. 
Consider the heat equation $$\label{Heat} \frac{\partial u}{\partial t}(t,x) - \Delta u (t,x) = f(t,x), \quad (t,x)\in \mathbb{R} \times \Omega,$$ with the Dirichlet boundary condition $$\label{Dirichlet} u(t,x) = g(t,x), \quad (t,x) \in \mathbb{R} \times \partial \Omega,$$ or the Neumann boundary condition $$\label{Neumann} \frac{\partial u }{\partial n} (t,x) = g(t,x), \quad (t,x) \in \mathbb{R} \times \partial \Omega,$$ where we denote by n(x) the outward normal in x\in \partial \Omega. \begin{theorem} Assume that f \in C ^1(\mathbb{R};L^2(\Omega)) is T-periodic and g(t,x) = \frac{\partial u_0}{\partial n}(t,x), (t,x) \in \mathbb{R}\times \partial \Omega where  u_0 \in C^1(\mathbb{R};H^2(\Omega)) \cap C^2(\mathbb{R};L^2(\Omega)) is T-periodic. Then the heat problem (\ref{Heat}), (\ref{Neumann}) has T-periodic solutions u \in C(\mathbb{R};H^2(\Omega)) \cap C^1(\mathbb{R};L^2(\Omega)) if and only if$$ \int_{\partial \Omega}\int_0 ^T g(t,x)dtd\sigma + \int_\Omega \int_0 ^Tf(t,x)dtdx =0. $$In this case the periodic solutions satisfies the estimates \begin{eqnarray} \label{EstimNeumann} \|u' - u_0'\|_{L^\infty([0,T];L^2(\Omega))} & \leq & \frac{1}{\sqrt T}\|f-u_0'+\Delta u_0\|_{L^2(]0,T[;L^2(\Omega))} \nonumber \\ & + & \|f'-u_0''+\Delta u_0 '\|_{L^1(]0,T[;L^2(\Omega))}, \end{eqnarray} and the solution is unique up to a constant. 
\end{theorem} \paragraph{Proof} Let us search for solutions u = u_0 + v where $$\label{HeatMod} \frac{\partial v}{\partial t}(t,x) - \Delta v(t,x) = f(t,x) - \frac{\partial u_0}{\partial t} (t,x) + \Delta u_0 (t,x), \quad (t,x) \in \mathbb{R} \times \Omega,$$ and $$\label{NeumannMod} \frac{\partial v}{\partial n}(t,x) = g(t,x) - \frac{\partial u_0}{\partial n}(t,x) = 0, \quad (t,x) \in \mathbb{R} \times \partial \Omega.$$ Consider the operator A_N:D(A_N)\subset L^2(\Omega) \to L^2(\Omega) given as$$ A_Nv = -\Delta v $$with domain$$ D(A_N) = \big\{ v \in H^2(\Omega):\frac {\partial v}{\partial n} (x) = 0, \; \forall \; x \in {\partial \Omega} \big\}. $$The operator A_N is linear monotone: \begin{eqnarray} (A_Nv,v) & = & - \int_{\Omega} \Delta v(x)v(x)dx \nonumber \\ & = & -\int_{\partial \Omega } \frac{\partial v}{\partial n}(x) v(x) d\sigma + \int_\Omega \|\nabla v(x)\|^2 dx \nonumber \\ & = & \int_\Omega \|\nabla v(x)\|^2 dx \geq 0, \quad \forall \; v \in D(A_N). \end{eqnarray} Since the equation \lambda v - \Delta v = f has unique solution in D(A_N) for every f\in L^2(\Omega), \lambda > 0 it follows that A_N is maximal (see \cite{brezis}). Moreover, it is symmetric$$ (A_Nv_1, v_2) = \int_\Omega \nabla v_1(x) \cdot \nabla v_2 (x) dx = (v_1, A_Nv_2), \quad \forall \; v_1, v_2 \in D(A_N). $$Note that by the hypothesis the second member in (\ref{HeatMod}) f - u_0'+ \Delta u_0 belongs to C^1(\mathbb{R};L^2(\Omega)). Therefore the Theorem \ref{MD_NecAndSufLinSym} applies and hence the problem (\ref{HeatMod}), (\ref{NeumannMod}) has periodic solutions if and only if there is w\in D(A_N) such that$$ -\Delta w = \frac{1}{T}\int_0 ^T \{f(t) - \frac{du_0}{dt}(t) + \Delta u_0 (t)\}dt. 
$$Since u_0 is T-periodic we have \int_0 ^T \frac{du_0}{dt}(t) dt = 0 and thus w + \frac{1}{T}\int_0 ^T u_0(t)dt is a solution of the elliptic problem$$ -\Delta \Big( w + \frac{1}{T}\int_0 ^T u_0(t)dt\Big) = \frac{1}{T}\int _0 ^T f(t)dt = F, $$with the boundary condition \begin{eqnarray} \frac{\partial }{\partial n}\Big( w + \frac{1}{T}\int_0 ^T u_0(t)dt \Big) & = & \frac{\partial w}{\partial n} + \frac{1}{T}\int_0 ^T \frac{\partial u_0}{\partial n}(t)dt\nonumber \\ & = & \frac{1}{T}\int_0 ^T g(t)dt = G. \nonumber \end{eqnarray} As known from the general theory of partial differential equations (see \cite{brezis}) this problem has a solution if and only if \int_{\partial \Omega} G(x)d\sigma + \int_\Omega F(x)dx = 0 or$$ \int_{\partial \Omega}\int_0 ^T g(t,x)dtd\sigma + \int_\Omega \int_0 ^T f(t,x)dtdx = 0. $$The estimate (\ref{EstimNeumann}) follows from Theorem \ref{MD_NecAndSufLinSym}. For the heat equation with Dirichlet boundary condition we have the following existence result. \begin{theorem} Assume that f \in C ^1(\mathbb{R};L^2(\Omega)) is T-periodic and g(t,x) = u_0(t,x), (t,x) \in \mathbb{R}\times \partial \Omega where  u_0 \in C^1(\mathbb{R};H^2(\Omega)) \cap C^2(\mathbb{R};L^2(\Omega)) is T-periodic. Then the heat problem (\ref{Heat}), (\ref{Dirichlet}) has a unique T-periodic solution u in \\ C(\mathbb{R};H^2(\Omega)) \cap C^1(\mathbb{R};L^2(\Omega)) and there is a constant C(\Omega) such that \begin{eqnarray} \label{EstimDirichlet1} \|u - u_0\|_{L^\infty([0,T];L^2(\Omega))} & \leq & C(\Omega) \|f + \Delta u_0\|_{L^\infty([0,T];L^2(\Omega))} \nonumber \\ &&+ \frac{\sqrt T}{2}\|f- u_0' + \Delta u_0\|_{L^2(]0,T[;L^2(\Omega))} \nonumber \\ &&+ \frac{T}{2}\|f' - u_0'' + \Delta u_0'\|_{L^1(]0,T[;L^2(\Omega))}, \end{eqnarray} and \begin{eqnarray} \label{EstimDirichlet2} \|u' - u_0'\|_{L^\infty([0,T];L^2(\Omega))} & \leq & \frac{1}{\sqrt T}\|f-u_0'+\Delta u_0\|_{L^2(]0,T[;L^2(\Omega))} \nonumber \\ &&+ \|f'-u_0''+\Delta u_0 '\|_{L^1(]0,T[;L^2(\Omega))}. 
\end{eqnarray} \end{theorem} \paragraph{Proof} This time we consider the operator A_D :D(A_D)\subset L^2(\Omega) \to L^2(\Omega)  given as$$ A_Dv = -\Delta v $$with domain$$ D(A_D) = \big\{v \in H^2(\Omega) : v(x) = 0 ,\; \forall x \in \partial \Omega \big\}. $$As before A_D is linear, monotone and symmetric and thus our problem reduces to the existence for an elliptic equation:$$ -\Delta w = \frac {1}{T}\int_0 ^T \{f(t) + \Delta u_0(t)\}dt, $$with homogeneous Dirichlet boundary condition w = 0 on \partial \Omega. Since the previous problem has a unique solution verifying \begin{eqnarray} \|w\|_{L^2(\Omega)} &\leq& C(\Omega) \|\frac{1}{T}\int_0 ^T \{ f(t) + \Delta u_0(t)\}dt \|_{L^2(\Omega)} \nonumber\\ &\leq& C(\Omega) \|f + \Delta u_0 \|_{L^\infty([0,T];L^2(\Omega))}, \label{EstimDirichlet3} \end{eqnarray} we prove the existence for (\ref{Heat}), (\ref{Dirichlet}). Here we denote by C(\Omega) Poincar\'e's constant,$$ \Big(\int_\Omega |w(x)|^2dx \Big) ^{1/2}\leq C(\Omega) \Big( \int_\Omega \|\nabla w(x)\|^2 dx \Big) ^{1/2}, \quad \forall w \in H^1_0(\Omega). $$Moreover in this case the operator A_D is strictly monotone. Indeed, by using Poincar\'e's inequality, for each v \in D(A_D), we have \begin{eqnarray*} \Big( \int_\Omega |v(x)|^2dx \Big) ^{1/2} \leq C(\Omega) \Big( \int _\Omega \|\nabla v(x)\|^2 dx \Big) ^{1/2} = C(\Omega) (A_Dv,v)^{1/2}. \end{eqnarray*} Hence if (A_Dv,v) = 0 we deduce that v=0. Therefore, by Proposition \ref{MD_Uniq} we deduce the uniqueness of the periodic solution for (\ref{Heat}), (\ref{Dirichlet}). The estimates of the solution follow immediately from (\ref{EstimDirichlet3}) and Theorem \ref{MD_NecAndSufLinSym}. \subsection{Non-linear case} Throughout this section we will consider evolution equations associated to sub-differential operators. Let \varphi : H \to ]-\infty , + \infty ] be a lower-semicontinuous proper convex function on a real Hilbert space H. 
Denote by \partial \varphi \subset H \times H the sub-differential of \varphi , $$\label{SubDiffOp} \partial \varphi (x) = \big\{ y \in H; \; \varphi (x) - \varphi (u) \leq (y, x-u), \; \forall u \in H\big\},$$ and denote by D(\varphi) the effective domain of \varphi: \begin{equation*} %\label{} D(\varphi) = \big\{ x \in H; \; \varphi (x) < + \infty \big\}. \end{equation*} Under the previous assumptions on \varphi we recall that A = \partial \varphi is maximal monotone in H\times H and \overline{D(A)} = \overline{D(\varphi)}. Consider the equation $$\label{SubDiff} x'(t) + \partial \varphi x(t) \ni f(t), \quad 0 < t < T.$$ For each \alpha > 0 we consider the unique periodic solution x_\alpha for $$\label{AlphaSubDiff} \alpha x_\alpha (t)+ x_\alpha '(t) + \partial \varphi x_\alpha (t) = f(t), \quad 0 < t < T.$$ First let us show that (x_\alpha ')_{\alpha >0} is uniformly bounded in L^2(]0,T[;H). Indeed, after multiplication by x_\alpha '(t) we obtain$$ \int_0 ^T \|x_\alpha '(t)\|^2dt + \int_0 ^T \{\alpha (x_\alpha (t),x_\alpha '(t)) + ( \partial \varphi x_\alpha (t), x_\alpha '(t))\}dt = \int_0 ^T (f(t),x_\alpha '(t))dt. $$Since x_\alpha  is T-periodic we deduce that \begin{eqnarray} \lefteqn{ \int_0 ^T \{\alpha (x_\alpha (t),x_\alpha '(t)) + ( \partial \varphi x_\alpha (t), x_\alpha '(t))\}dt }\nonumber\\ & = & \int_0 ^T \frac{d}{dt}\{ \frac{\alpha}{2}\|x_\alpha (t)\|^2 + \varphi (x_\alpha(t)) \}dt \nonumber\\ & = & \frac{\alpha}{2}\|x_\alpha (t)\|^2 + \varphi (x_\alpha(t)) |_0 ^T = 0. \end{eqnarray} Therefore, \|x_\alpha '\|_{L^2(]0,T[;H)}^2 \leq (f,x_\alpha ')_{L^2(]0,T[;H)} and thus$$ \|x_\alpha '\|_{L^2(]0,T[;H)} \leq \|f\|_{L^2(]0,T[;H)}, \quad \alpha >0. $$Before estimating (x_\alpha )_{\alpha > 0}, let us check that (\alpha x_\alpha )_{\alpha > 0} is bounded. 
By taking x_0 \in D(\partial \varphi), after standard calculation we find that $$\label{NonLinEvo} \|x_\alpha (t) - x_0\| \leq e ^{-\alpha t } \|x_\alpha(0) - x_0\| + \int_0 ^t e ^{-\alpha (t-s)} \|f(s) - \alpha x_0 - \partial \varphi (x_0)\|\,ds.$$ Since x_\alpha is T-periodic we can write \begin{eqnarray*} \|x_\alpha (t) - x_0\| & = & \lim_{n\to\infty} \| x_\alpha (nT + t) - x_0\| \nonumber \\ & \leq & \lim_{n\to\infty} \Big\{e ^{-\alpha (nT + t)}\|x_\alpha (0) - x_0\| \\ &&+ \int_0 ^{nT + t} e ^ {-\alpha (nT + t - s)} \|f(s) - \alpha x_0 - \partial \varphi (x_0)\|\,ds \Big\} \\ & \leq & \frac{1}{\alpha}\|\alpha x_0 + \partial \varphi (x_0) \| + \lim_{n\to\infty} \int_0 ^{nT + t} e ^ {-\alpha (nT + t - s)} \|f(s)\|\,ds \nonumber \\ & \leq & \frac{1}{\alpha}\|\alpha x_0 + \partial \varphi (x_0) \|\\ &&+ \lim_{n\to\infty} \left \{ \left [ 1 + e ^{-\alpha t} ( e ^{-\alpha (n-1)T} + \dots+ e ^{-\alpha T} + 1 ) \right ] \cdot \|f\|_{L^1} \right \} \nonumber \\ & = & \frac{1}{\alpha}\|\alpha x_0 + \partial \varphi (x_0) \| + \big( 1 + \frac{e ^{-\alpha t} }{1 - e ^{-\alpha T}}\big) \cdot \|f\|_{L^1(]0,T[;H)} \nonumber \\ & \leq & C_1(x_0, T, \|f\|_{L^2(]0,T[;H)}) \big( 1 + \frac{1}{\alpha} \big), \quad 0 \leq t \leq T, \quad \alpha >0. \nonumber \end{eqnarray*} It follows that \alpha \|x_\alpha (t)\| \leq C_2(x_0, T, \|f\|_{L^2(]0,T[;H)}), 0\leq t \leq T, 0 < \alpha <1. Now we can estimate x_\alpha, \; \alpha >0. After multiplication by x_\alpha (t) and integration on [0,T] we obtain $$\label{intsub} \int_0 ^T \alpha \|x_\alpha (t) \|^2 dt + \int_0 ^T ( \partial \varphi ( x_\alpha (t)), x_\alpha (t)) dt = \int_0 ^T ( f(t), x_\alpha (t)) dt.$$ We have$$ \varphi (x_0) \geq \varphi ( x_\alpha (t) ) + ( \partial \varphi ( x_\alpha (t)), x_0 - x_\alpha (t)), \quad t \in [0,T], \; \alpha >0. 
$$Thus we deduce that for \alpha > 0,$$ \int_0 ^T ( \partial \varphi ( x_\alpha (t)), x_\alpha(t)) dt \geq \int_0 ^T \varphi( x_\alpha (t)) dt + \int_0 ^T \{ ( \partial \varphi ( x_\alpha (t)), x_0) - \varphi(x_0)\}dt. $$On the other hand for 0 < \alpha <1, \begin{eqnarray} \int_0 ^T (\partial \varphi ( x_\alpha (t)), x_0)\, dt & = & \int_0 ^T (f(t) - \alpha x_\alpha (t) - x'_\alpha (t) , x_0)\,dt \nonumber \\ & = & \Big(\int_0 ^T f(t) \; dt, x_0\Big) - \int_0 ^T ( \alpha x_\alpha (t), x_0) \; dt \nonumber \\ & \geq & - C_3(x_0 , T, \|f\|_{L^2(]0,T[;H)}). \nonumber \end{eqnarray} Therefore, $$\label{C4} \int_0 ^T ( \partial \varphi ( x_\alpha (t)) , x_\alpha (t) ) dt \geq \int_0 ^T \varphi (x_\alpha (t)) dt - C_4(x_0, T, \|f\|_{L^2(]0,T[;H)}).$$ Combining (\ref{intsub}) and (\ref{C4}) we deduce that \begin{eqnarray} \label{comb1} \int_0 ^T \varphi ( x_\alpha (t)) dt & \leq & C_4 + \int_0 ^ T ( \partial \varphi ( x_\alpha (t)), x_\alpha (t)) dt \nonumber \\ & = & C_4 + \int_0 ^T ( f(t), x_\alpha (t)) dt - \int_0 ^T \alpha \|x_\alpha (t)\|^2 dt \nonumber \\ & \leq & C_4 + \int_0 ^T ( f(t), x_\alpha (t)) dt, \quad 0<\alpha <1. 
\end{eqnarray} On the other hand we have \begin{eqnarray*} \lefteqn{\int_0 ^T ( f(t), x_\alpha (t) ) dt }\\ & = & \int_0 ^T ( f(t) - \langle f\rangle, x_\alpha (t)) dt + \Big( \int_0 ^T x_\alpha (t) dt, \langle f\rangle \Big) \nonumber \\ & = & \int_0 ^T ( f(t) - \langle f\rangle, x_\alpha (0) + \int_0 ^ t x_\alpha '(s)\,ds) dt + T ( \langle x_\alpha\rangle , \langle f\rangle) \nonumber \\ & = & \int_0 ^T ( f(t) - \langle f\rangle, \int_0 ^t x_\alpha '(s) \,ds) dt + T ( \langle x_\alpha\rangle , \langle f\rangle) \nonumber \\ & \leq & \int_0 ^T \|f(t) - \langle f\rangle\| \cdot \Big( \int_0 ^t \|x'_\alpha (s)\|^2\, ds\Big)^{1/2} \cdot t ^{1/2} \, dt + T (\langle x_\alpha\rangle , \langle f\rangle) \nonumber \\ & \leq & \|f - \langle f\rangle \|_{L^2(]0,T[;H)} \cdot \|f\|_{L^2(]0,T[;H)} \cdot \frac{T}{\sqrt {2}} + T(\langle x_\alpha\rangle , \langle f\rangle). \nonumber \end{eqnarray*} Finally we deduce that $$\label{Coer} \int_0 ^T \left \{ \varphi (x_\alpha (t)) - ( x_\alpha (t), \langle f\rangle) \right \} \,dt \leq C_5(x_0, T, \|f\|_{L^2(]0,T[;H)}), \quad 0 < \alpha <1,$$ and thus there is t_\alpha \in [0,T] such that $$\label{LastIneq} \varphi(x_\alpha (t)) - (x_\alpha (t), \langle f\rangle) \leq \frac{C_5}{T}, \quad 0 < \alpha <1.$$ From Hypothesis (\ref{CondForSubDiff}) we get that (x_\alpha (t _\alpha ))_{0 < \alpha < 1} is bounded and therefore from (\ref{NonLinEvo}), for t \in [t_\alpha , t_\alpha + T],$$ \|x_\alpha (t) - x_0\| \leq e ^{-\alpha (t - t_\alpha )} \|x_\alpha (t_\alpha ) - x_0\| + \int_{t_\alpha } ^ t e ^ { - \alpha (t-s) }\|f(s) - \alpha x_0 - \partial \varphi (x_0) \|\, ds. $$we deduce that (x_\alpha)_{0 < \alpha <1} is bounded in L^\infty(]0,T[;H) and that there is x \in L^\infty(]0,T[;H) such that x_\alpha (t) \rightharpoonup x(t) when \alpha goes to 0 for t \in [0,T]. 
Moreover, from (\ref{LastIneq}) it follows that (\varphi (x_\alpha(t_\alpha)))_{0 < \alpha <1} is bounded from above and we deduce that \begin{eqnarray*} \varphi (x_\alpha (t)) & = & \varphi ( x_\alpha (t_\alpha )) + \int_{t_\alpha } ^ t ( \partial \varphi (x_\alpha (s)), x'_\alpha (s) ) \, ds \nonumber \\ & \leq & \varphi ( x_\alpha (t_\alpha )) + \int_{t_\alpha } ^t ( f(s) - \alpha x_\alpha (s) - x '_\alpha (s), x'_\alpha (s) ) \, ds \nonumber \\ & \leq & C_6 ( x_0 , T, \|f\|_{L^2(]0,T[;H)}), \quad 0 < \alpha <1 \nonumber. \end{eqnarray*} On the other hand, by writing \varphi (x_\alpha (t)) \geq \varphi(x_0) + ( \partial \varphi (x_0), x_\alpha (t) - x_0), 0 \leq t \leq T, \alpha >0 we deduce that \varphi (x_\alpha (t)) is also bounded from below so that finally (\varphi \circ x_\alpha )_{0 < \alpha < 1} is bounded in L^\infty(]0,T[;H). Now, using the second hypothesis of the theorem (every level subset is compact) we deduce that x_\alpha (0) \to x(0) when \alpha  goes to 0 (at least for a subsequence \alpha_n \searrow 0). In fact we can easily check that x_\alpha converges uniformly to x on [0,T] since$$ \|x_\alpha (t) - x_\beta (t) \| \leq \|x_\alpha (0) - x_\beta (0) \| + |\alpha - \beta |\cdot T \cdot \sup_{0 < \gamma < 1} \|x_\gamma \|_{L^\infty(]0,T[;H)}, $$for 0\leq t \leq T, 0 < \alpha , \beta < 1. Now, since \lim_{\alpha \searrow 0} dx_\alpha /dt = dx/dt in the sense of H-valued vectorial distribution on ]0,T[ and (x'_\alpha )_{\alpha >0} is bounded in L^2(]0,T[;H) it follows that x' belongs to L^2(]0,T[;H) and in particular x is absolutely continuous on every compact of ]0,T[ and therefore a.e. differentiable on ]0,T[. To complete the proof we need to show that x(t) \in D(\varphi) a.e. on ]0,T[ and x'(t) + \partial \varphi x(t) \ni f(t) a.e. on ]0,T[. 
For arbitrarily [u,v] \in \partial \varphi we have$$ \frac{1}{2} e ^ {2\alpha t} \|x_\alpha (t) - u \|^2 \leq \frac{1}{2} e ^ {2\alpha s} \| x_\alpha (s) - u\|^2 + \int_s ^t e ^{2 \alpha \tau }( f(\tau) - \alpha u - v, x_\alpha (\tau) - u)\, d\tau, $$with 0\leq s \leq t \leq T, \alpha >0. Passing to the limit for \alpha \searrow 0 we get$$ \frac{1}{2} \|x (t) - u \|^2 \leq \frac{1}{2} \| x(s) - u\|^2 + \int_s ^t( f(\tau) - v, x (\tau) - u) \, d\tau, \quad 0\leq s \leq t \leq T. $$Thus$$ (x(t) - x(s) , x(s) - u) \leq \frac{1}{2}\|x(t) - u \|^2 - \frac{1}{2} \|x(s) - u \|^2 \leq \int_s ^t( f(\tau) - v, x (\tau) - u) \, d\tau, $$for 0\leq s \leq t \leq T. Since x is a.e. differentiable on ]0,T[ we find that \begin{eqnarray*} (x'(t), x(t) - u ) & = & \lim_{s \nearrow t} \frac{1}{t-s}( x(t) - x(s) , x(s) - u ) \nonumber \\ & \leq & \lim_{ s \nearrow t} \frac{1}{t-s} \int_s ^t ( f(\tau) - v, x(\tau) - u) \, d\tau \nonumber \\ & = & (f(t) - v, x(t) - u), \quad a.e. \;t \in ]0,T[, \; \forall \; [u,v] \in \partial \varphi. \end{eqnarray*} Finally, since \partial \varphi  is maximal monotone and (f(t) - x'(t) - v, x(t) - u) \geq 0 for all [u,v] \in \partial \varphi we deduce that x(t) \in D(\partial \varphi) a.e. on ]0,T[ and x'(t) + \partial \varphi x(t) \ni f(t) a.e. on ]0,T[. Since \varphi  is lower-semicontinuous we also have$$ \varphi (x(t)) \leq \lim_{\alpha \searrow 0} \inf \varphi (x_\alpha (t)) \leq \lim_{\alpha \searrow 0} \inf \|\varphi \circ x_\alpha \|_{L^\infty} \leq \sup_{0 < \gamma < 1} \|\varphi \circ x_\gamma \|_{L ^\infty}. 
$$As previously, by writing \begin{eqnarray} \varphi(x(t)) & \geq & \varphi (x_0) + (\partial \varphi (x_0), x(t) - x_0) \nonumber \\ & \geq & \varphi (x_0) - \|\partial \varphi (x_0) \| \cdot ( \|x_0\| + \liminf_{\alpha \searrow 0} \|x_\alpha (t)\| ) \nonumber \\ & \geq & \varphi (x_0) - \|\partial \varphi (x_0)\| \cdot ( \|x_0\| + \sup_{0 < \gamma < 1} \|x_\gamma \|_{L^{\infty}}), \quad 0 \leq t \leq T,\nonumber \end{eqnarray} we deduce finally that \varphi \circ x \in L ^ \infty (0,T). \begin{remark} \rm If \dim H < +\infty then the level subsets \{x \in H \; ; \; \varphi (x) + \|x\|^2 \leq M\} are compact as bounded sets. \end{remark} \begin{remark} \rm Assume that \varphi : H \to ]-\infty, +\infty] is a lower-semicontinuous proper convex function such that \mathop{\rm Range}(\partial \varphi ) = H, which is equivalent to$$ \lim_{ \|x\| \to \infty} \{ \varphi(x) - (x,y) \} = + \infty, \quad \forall y \in H, $$see \cite{Bre}, p. 41. In particular, by taking y = \langle f\rangle we deduce that the hypothesis (\ref{CondForSubDiff}) is verified. \end{remark} \begin{remark} \rm Assume that \varphi is coercive$$ \lim_{ \|x\|\to \infty} \frac{(\partial \varphi (x), x-x_0)}{\|x\|} = +\infty, \quad \forall x_0 \in D(\varphi), $$which is equivalent to \lim_{ \|x\| \to \infty} \frac{\varphi(x)}{\|x\|} = +\infty (see \cite{Bre}, p. 42). Then \mathop{\rm Range}(\partial \varphi) = H because the previous condition is satisfied: \lim_{ \|x\| \to \infty } \{ \varphi (x) - (x,y) \} = +\infty, for all y \in H and therefore (\ref{CondForSubDiff}) is verified. \end{remark} \begin{theorem} \label{NecAndSuffNonLinBis} Suppose that \varphi : H \to ]-\infty, +\infty] is a lower-semicontinuous proper convex function and f \in W^{1,1}(]0,T[;H) such that $$\lim_{\|x\|\to \infty} \{\varphi(x) - (x, \langle f\rangle) \} = +\infty,$$ and every level subset \{x \in H; \; \varphi (x) + \|x\|^2 \leq M\} is compact. 
Then equation (\ref{SubDiff}) has T-periodic solutions x\in C([0,T];H)\cap W^{1,\infty}(]0,T[;H) which satisfy$$ x(t) \in D(\partial \varphi ), \quad \forall \; t \in [0,T], \quad \frac{d^+}{dt}x(t) + ( \partial \varphi x(t) - f(t) ) ^\circ = 0, \quad \forall \; t \in [0,T], $$where (\partial \varphi - f )^\circ  denotes the minimal section of \partial \varphi - f . \end{theorem} \paragraph{Proof} Since W^{1,1}(]0,T[;H) \subset L^2(]0,T[;H) the previous theorem applies. Consider x \in C([0,T];H) \cap W^{1,2}(]0,T[;H) a T-periodic solution for (\ref{SubDiff}). Since \|x'\|_{L^2(]0,T[;H)} \leq \|f\|_{L^2(]0,T[;H)} it follows that there is t ^\star \in ]0,T[ such that x is differentiable at t^\star and \|x'(t ^\star)\| \leq \frac{1}{\sqrt {T}}\|f\|_{L^2(]0,T[;H)}. By standard calculation we find that:$$ \|\frac{1}{h}(x(t+h) - x(t))\| \leq \|\frac{1}{h} ( x ( t ^ \star + h) - x( t ^ \star) ) \| + \int_{ t ^ \star} ^ t \| \frac{1}{h} ( f (\tau + h) - f (\tau) ) \| \, d\tau, $$and therefore \sup_{ 0 \leq t \leq T, \; h >0} \|\frac{1}{h}( x(t+h) - x(t))\| \leq C which implies that x \in W^{1,\infty}(]0,T[;H). Making use of the inequality$$ \frac{1}{h} ( x(t+h) - x(t), x(t) - u ) \leq \frac{1}{h} \int_t ^ {t + h} ( f ( \tau) - v, x(\tau) - u ) \, d\tau, \quad 0 \leq t < t + h \leq T, $$which holds for every [u,v] \in \partial \varphi we deduce that x(t) \in D(\partial \varphi ) for all t \in [0,T] and the weak closure of the set \{ \frac{1}{h} ( x(t+h) - x(t)), \; h >0\} belongs to f(t) - \partial \varphi x(t), \; \forall t \in [0,T]. On the other hand by writing$$ \| x(t+h) - u \| \leq \|x(t) - u\| + \int_t ^ {t + h} \|f(\tau) - v\|\, d\tau, \quad 0 \leq t < t + h \leq T,  for $u = x(t)$ and $v \in \partial \varphi x(t)$ we find that \begin{eqnarray*} \|(\partial \varphi x(t) - f(t) )^\circ \| &\leq& \| w-\lim_{h \searrow 0} \frac{1}{h}(x(t+h) - x(t))\| \\ &\leq& \limsup_{h \searrow 0} \|\frac{1}{h} ( x(t+h) - x(t)) \| \\ &\leq& \|(\partial \varphi x(t) - f(t) )^\circ \| . 
\end{eqnarray*} This shows that $\lim_{h \searrow 0} \frac{1}{h} ( x(t+h) - x(t)) = \frac{d^+}{dt} x(t)$ exists for every $t \in [0,T]$ and coincides with $-(\partial \varphi x(t) - f(t) )^\circ$. \begin{thebibliography}{99} \frenchspacing \bibitem{barbu} V. Barbu, \textit{Nonlinear Semigroups and Differential Equations in Banach Spaces}, Noordhoff (1976). \bibitem{CRAS_EqEvPer} M. Bostan, \textit{Solutions p\'eriodiques des \'equations d'\'evolution}, C. R. Acad. Sci. Paris, S\'er. I Math. t.332, pp. 1--4, \'Equations d\'eriv\'ees partielles, (2001). \bibitem{Almost} M. Bostan, \textit{Almost periodic solutions for evolution equations}, article in preparation. \bibitem{Bre} H. Brezis, \textit{Op\'erateurs maximaux monotones et semi-groupes de contractions dans les espaces de Hilbert}, North-Holland, Lecture Notes no. 5 (1972). \bibitem{BreHar} H. Brezis, A. Haraux, \textit{Image d'une somme d'op\'erateurs monotones et applications}, Israel J. Math. 23 (1976), 2, pp. 165--186. \bibitem{brezis} H. Brezis, \textit{Analyse fonctionnelle}, Masson, (1998). \bibitem{Har} A. Haraux, \textit{\'Equations d'\'evolution non lin\'eaires: solutions born\'ees p\'eriodiques}, Ann. Inst. Fourier 28 (1978), 2, pp. 202--220. \end{thebibliography} \noindent\textsc{Mihai Bostan }\\ Universit\'e de Franche-Comt\'e \\ 16 route de Gray F-25030 \\ Besan\c{c}on Cedex, France \\ mbostan@math.univ-fcomte.fr \end{document}