\documentclass[reqno]{amsart}
\usepackage{hyperref}
\usepackage{amssymb}
\AtBeginDocument{{\noindent\small
\emph{Electronic Journal of Differential Equations},
Vol. 2013 (2013), No. 124, pp. 1--3.\newline
ISSN: 1072-6691. URL: \url{http://ejde.math.txstate.edu} or \url{http://ejde.math.unt.edu}
\newline ftp ejde.math.txstate.edu}
\thanks{\copyright 2013 Texas State University - San Marcos.}
\vspace{9mm}}
\begin{document}
\title[\hfilneg EJDE-2013/124\hfil Existence and uniqueness of a local solution]
{Existence and uniqueness of a local solution for $x' = f(t,x)$
using inverse functions}
\author[J. T. Hoag \hfil EJDE-2013/124\hfilneg]
{Jeffrey T. Hoag} % in alphabetical order
\address{Jeffrey T. Hoag \newline
Mathematics Department, Providence College,
Providence, RI 02918, USA}
\email{jhoag@providence.edu}
\thanks{Submitted January 13, 2013. Published May 20, 2013.}
\subjclass[2000]{34A12}
\keywords{Existence; uniqueness; ordinary differential equation}
\begin{abstract}
A condition on the function $f$ is given such that the scalar ordinary
differential equation $x' = f(t,x)$ with initial condition
$x(t_0) = x_0$ has a unique solution in a neighborhood of $t_0$.
An example illustrates that this result can be used when other theorems
which put conditions on the difference $f(t,x)-f(t,y)$ do not apply.
\end{abstract}
\maketitle
\numberwithin{equation}{section}
\newtheorem{theorem}{Theorem}[section]
\allowdisplaybreaks
\section{Introduction}
Consider the differential equation with initial condition:
\begin{equation} \label{e1.1}
x'(t)=f(t, x(t)), \quad x(t_0)=x_0
\end{equation}
where $f$ is a scalar-valued function which is continuous in a
neighborhood $N$ of $(t_0,x_0)$. The continuity of $f$
guarantees that there is at least one solution to this initial value problem.
There are various other conditions that can be imposed on $f$
which will ensure that \eqref{e1.1} has a unique solution.
Over twenty such uniqueness conditions are collected in [1].
Most of these, including results by Nagumo [3], Osgood [4] and Perron [5],
rely on restrictions on $f(t,x)-f(t,y)$ and can be considered generalizations
of the Lipschitz condition in the second argument.
In this article, a uniqueness theorem for \eqref{e1.1} is given which
instead puts the Lipschitz condition on the first argument of $f$. That is,
the condition is on the difference $f(t,x)-f(s,x)$ for $(t,x)$ and $(s,x)$
in $N$. It is easy to see that this is possible when $f(t_0,x_0)\neq 0 $
because in this case a solution of \eqref{e1.1} is invertible in a
neighborhood of $(t_0,x_0)$ and so if $t(x)$ is the inverse of a solution
to \eqref{e1.1}, it satisfies
\begin{equation} \label{e1.2}
t'(x)=g(x, t(x)), \quad
t(x_0)=t_0
\end{equation}
where we define $ g(x,t) = 1/f(t,x)$.
If $f$ is Lipschitz in its first argument in a neighborhood $N$ of
$(t_0,x_0)$ then there is a neighborhood $M$ of $(x_0,t_0)$ where
$g$ is Lipschitz in its second argument.
From this it follows that \eqref{e1.2} has a unique solution in a
neighborhood of $(x_0,t_0)$ and therefore \eqref{e1.1} has a unique
solution in a neighborhood of $(t_0,x_0)$.
The theorem that follows extends this approach to include cases when
$f(t_0,x_0)=0$.
It will be followed by an example for which this theorem applies
but other uniqueness theorems do not.
\section{Main result}
\begin{theorem} \label{mainthm}
For $(t_0,x_0)\in\mathbb{R}^2$ and positive numbers $a$ and $b$,
define
\[
U = [t_0 - a,t_0+a]\times[x_0-b,x_0+b].
\]
Let $f:U\to\mathbb{R}$ be a continuous function satisfying the
following three conditions:
\begin{itemize}
\item[(i)] there are constants $c>0$ and $r\in(0,1/2)$
such that
\[
|f(t,x)|\geq c|x-x_0|^r\quad\text{for all }(t,x)\in U;
\]
\item[(ii)] $f(t,x_0)$ is not identically zero on any interval
$(t_0-\varepsilon, t_0+\varepsilon)$ for $0<\varepsilon\le a$;
\item[(iii)] there is a constant $\alpha>0$ such that
\[
|f(t,x)-f(s,x)|\leq \alpha|t-s|\quad\text{for all }(t,x),(s,x)\in U.
\]
\end{itemize}
Then there are positive numbers $\eta$ and $\nu$ such that \eqref{e1.1}
has a unique solution on the interval $(t_0-\nu,t_0+\eta)$.
\end{theorem}

\begin{proof}
By condition (i), $f$ does not change sign on $U$; assume without loss of
generality that $f(t,x)>0$ for $x\neq x_0$, so that every solution of
\eqref{e1.1} is monotone. If $x$ is an increasing solution of \eqref{e1.1},
then its inverse $t(x)$ satisfies
\begin{equation} \label{e2.1}
t(x)=t_0+\int_{x_0}^x \frac{ds}{f(t(s),s)}
\end{equation}
for $x>x_0$.
Now let $x$ and $\tilde x$ be any two increasing solutions of \eqref{e1.1}
with inverses $t$ and $\tilde t$.
Since $t$ and $\tilde t$ are both solutions to \eqref{e2.1},
\[
|t(x) -\tilde t(x)|
\le |t(y) -\tilde t(y)| + \int_y^x
\frac{|f(t(s),s) -f(\tilde t(s), s)|}
{|f(t(s),s)|\,|f(\tilde t(s), s)|}\, ds
\]
for $x \geq y > x_0$. Then, using conditions (i) and (iii),
\[
|t(x) -\tilde t(x)| \le |t(y) -\tilde t(y)|
+ \frac{\alpha}{c^2} \int_y^x\frac{|t(s) -\tilde t(s)| }{
|s-x_0|^{2r}}\, ds\,.
\]
Applying the Gronwall--Reid Lemma to this inequality yields
\[
|t(x) -\tilde t(x)| \le |t(y) -\tilde t(y)|
\exp\Big\{ \frac{\alpha}{c^2} \int_y^x\frac{1 }{
|s-x_0 |^{2r}}\, ds\Big\}.
\]
Now take the limit as $y\to x_0 +$. Since $2r < 1 $,
the improper integral converges.
Also $|t(y) -\tilde t(y)|\to |t(x_0) -\tilde t(x_0)|=0$.
Therefore, $t(x) =\tilde t(x)$ in some interval $[x_0,x(t_0+\delta_1)]$
and so $x(t) = \tilde x(t) $ for $t\in [t_0,t_0 + \delta_1)$.
Thus there is at most one increasing solution to \eqref{e1.1} on an
interval $[t_0,t_0 + \delta_1)$. A similar argument shows that there
is at most one decreasing solution to \eqref{e1.1} on an interval
$ [t_0,t_0 + \delta_2)$. Since it is well-known that \eqref{e1.1}
has either one solution or infinitely many solutions,
and since every solution of \eqref{e1.1} is monotone, it follows
that \eqref{e1.1} has a unique solution on some interval $[t_0,t_0 + \eta)$.
A similar argument shows that there is also a unique solution on some
interval $(t_0-\nu,t_0]$.
\end{proof}
\subsection*{Examples}
Consider the initial-value problem
\begin{equation} \label{e3.1}
x'(t)=g(t) + h(t)|x(t)|^r, \quad x(0)= 0
\end{equation}
where $g$ and $h$ are non-negative Lipschitz continuous functions and $0