\documentclass[twoside]{article}
\usepackage{amsfonts, amsmath} % used for R in Real numbers
\pagestyle{myheadings}
\markboth{\hfil Nonlinear initial-value problems \hfil EJDE/Conf/10}
{EJDE/Conf/10 \hfil John V. Baxley \& Cynthia G. Enloe \hfil}
\begin{document}
\setcounter{page}{71}
\title{\vspace{-1in}\parbox{\linewidth}{\footnotesize\noindent
Fifth Mississippi State Conference on Differential Equations and
Computational Simulations, \newline
Electronic Journal of Differential Equations,
Conference 10, 2003, pp 71--78. \newline
http://ejde.math.swt.edu or http://ejde.math.unt.edu
\newline ftp ejde.math.swt.edu (login: ftp)}
\vspace{\bigskipamount} \\
%
Nonlinear initial-value problems with positive global solutions
%
\thanks{ {\em Mathematics Subject Classifications:} 34A12, 34B15.
\hfil\break\indent
{\em Key words:} Nonlinear initial-value problems, positive global solutions,
Carath\'eodory.
\hfil\break\indent
\copyright 2003 Southwest Texas State University. \hfil\break\indent
Published February 28, 2003. } }
\date{}
\author{John V. Baxley \& Cynthia G. Enloe}
\maketitle
\begin{abstract}
We give conditions on $m(t)$, $p(t)$, and
$f(t,y,z)$ so that the nonlinear initial-value problem
\begin{gather*}
\frac{1}{m(t)} (p(t)y')' + f(t,y,p(t)y') = 0,\quad\mbox{for }t>0,\\
y(0)=0,\quad \lim_{t \to 0^+} p(t)y'(t) = B,
\end{gather*}
has at least one positive solution for all $t>0$,
when $B$ is a sufficiently small positive constant.
We allow a singularity at $t=0$, so the derivative $y'(t)$ may be unbounded
near $t=0$.
\end{abstract}
\numberwithin{equation}{section}
\newtheorem{theorem}{Theorem}[section]
\section{Introduction}
We consider the initial-value problem
\begin{gather} \label{ode}
\frac{1}{m(t)}(p(t) y')' + f(t, y, p(t) y') = 0 , \quad t>0,\\
\label{ic}
y(0)=0, \quad \lim_{t \to 0^+}p(t) y'(t) = B, \quad B > 0.
\end{gather}
We allow a singularity at $t=0$, and so $y'(t)$ may not be bounded
near $t=0$. However, we require of a solution that it be continuous
at $t=0$, satisfy (\ref{ode}) a.e. on some interval $(0,\delta)$, and satisfy
(\ref{ic}). The singularity may be caused by the behavior of $m$ or
$p$ or $f$ near $t=0$ or by some combination of them.
This problem was considered earlier by Zhao \cite{Z} and by Maagli and
Masmoudi \cite{MM}. In particular, \cite{Z} considered the case that
$m \equiv p \equiv 1$ while \cite{MM} required that $m \equiv p$. In
each of these papers, only one of the initial conditions ($y(0)=0$) was
imposed and conditions were specified which guaranteed that this
``incomplete'' initial-value problem has infinitely many positive solutions
existing on the entire interval $(0,\infty)$. Both papers viewed the
problem as a boundary-value problem by imposing a condition at $\infty$, namely
\begin{equation} \label{infcond}
\lim_{t \to \infty} \frac{y(t)}{r(t)} = c > 0 ,
\end{equation}
where $r(t) = \int_0^t (p(s))^{-1} \, ds$. In \cite{Z},
$r(t)$ reduces to $r(t) = t$. In both \cite{MM} and \cite{Z}, the
Schauder fixed point theorem is the main tool and the hypotheses
imposed allow the authors to prove existence of at least one solution
of the boundary-value problem for $c$ sufficiently small.
Here, we shall treat the problem in the initial-value form (\ref{ode}),
(\ref{ic}). We shall impose conditions rather close to those of \cite{MM}
and \cite{Z}, and prove that our initial-value problem has
at least one positive solution for $B$ sufficiently small. Our methods
use initial-value techniques, similar to those used already in \cite{B1,B2},
and are completely different from the previous papers
discussed above. To get started, we must have a local solution on some interval
$(0,\delta)$ and for that purpose, we need a slight
generalization of the classical theorem of Carath\'eodory \cite{CL},
which we provide in Section 2.
In Section 3, we prove our main result, which we state below.
Let $r(t) = \int_0^t (p(s))^{-1} \, ds$ and assume that
\begin{itemize}
\item[M1:] $p(t)$ and $m(t)$ are positive and continuous on
$(0,\infty)$;
\item[M2:] $\frac{1}{p(t)} \in L^1(0,1)$;
\item[M3:] for some positive number, $D<\infty$, \[f:(0,\infty) \times (0,Dr(\infty)) \times (0,D) \to {\mathbb R}\] is a measurable function on $(0,\infty) \times (0,Dr(\infty)) \times (0,D)$ and $f(t,\cdot,\cdot)$ is continuous on $(0,Dr(\infty)) \times (0,D)$ for each fixed $t \in (0,\infty)$;
\item[M4:] \[|f(t,y,z)| \leq h_1(t,y,z)y + h_2(t,y,z)z\] where $h_1(t,y,z) \to 0$ and $h_2(t,y,z) \to 0$ as $(y,z) \to (0,0)$, $h_1$ and $h_2$ are nonnegative, and for $\alpha > 0$, let $h(t,y,z) = h_1(t,y,z)r(t) + h_2(t,y,z)$, \[g_\alpha(s) = \sup\{h(s,y,z): 0 < y < \alpha r(s), 0 < z < \alpha\}, s>0,\] and
$m(s)g_\alpha(s) \in L^1(0,\infty)$ for sufficiently small $\alpha > 0$.
\end{itemize}
\begin{theorem} Under assumptions M1--M4, there exists $\gamma > 0$ so that
$B \in (0,\gamma)$ implies
that the initial-value problem (\ref{ode}), (\ref{ic}) has at least
one solution existing for $0 < t < \infty$ and satisfying
\begin{gather*}
\frac{B}{2} < p(t) y'(t) < \frac{3B}{2} ,\\
\frac{B r(t)}{2} < y(t) < \frac{3Br(t)}{2} ,
\end{gather*}
for $0 < t < \infty$. Moreover, the two limits
\[\lim_{t \to \infty} \frac{y(t)}{r(t)} , \quad
\lim_{t \to \infty} p(t) y'(t) \]
exist, and if $r(\infty) = \infty$, the two limits are equal.
\end{theorem}
Other than the fact that \cite{MM} requires that $m \equiv p$,
the only substantive difference in our hypotheses is that we
do not require that $h_1$, $h_2$ be nondecreasing with respect
to $y$ and $z$, as they do. Of course, we prove existence for
an initial-value problem, not a boundary-value problem as they
do.
The key to our proof is that our local existence theorem in
Section 2 is formulated carefully to provide a lower bound
on the length of the interval of existence. In applying it
in Section 3, we show that this lower bound gives us a uniform
lower bound on the length of the interval of existence, regardless
of where in the interval $[0,\infty)$ we start the solution.
Thus, we are able to step from $0$ to $\infty$ inductively,
without fear that the sum of the lengths of our intervals
will converge, to complete the proof.
\section{Local Solutions}
In this section, we consider the initial-value problem
\begin{gather} \label{locode}
\frac{1}{m(t)}(p(t) y')' + f(t, y, p(t) y') = 0 , \quad t>t_0,\\
\label{locic}
y(t_0)=A, \quad \lim_{t \to t_0^+}p(t) y'(t) = B .
\end{gather}
We use $x_1 = y$, $x_2 = p(t) y'$ to transform to the
two-dimensional system
\begin{equation} \label{system}
\begin{gathered}
x'_1 = \frac{x_2}{p(t)} \\
x'_2 = -m(t) f(t,x_1,x_2)
\end{gathered}
\end{equation}
with initial conditions
\begin{equation} \label{systemic}
\lim_{t \to t_0^+} x_1 (t) = A, \quad \lim_{t \to t_0^+}
x_2 (t) = B .
\end{equation}
Let $R(t) = \int_{t_0}^t (p(s))^{-1} \, ds$. We shall
assume that
\begin{itemize}
\item[L1:] There exists $b>t_0$ such that $p(t)$ and $m(t)$ are positive
and continuous on $(t_0,b)$.
\item[L2:] $\frac{1}{p(t)} \in L^1(t_0,b)$.
\item[L3:] $f:S \to {\mathbb R}$, where
$S = \{t_0 < t \leq b, A + cR(t) < y < A + dR(t), c < z < d\}$,
and $f$ is measurable in $t$ for each fixed $(y,z)$ and continuous in
$(y,z)$ for each fixed $t$.
\item[L4:] There exists $h(t) \in L^1(t_0,b)$ such that
$m(t)|f(t,y,z)| \leq h(t)$, almost everywhere on the set $S$.
\end{itemize}
We shall prove the following generalization of Carath\'eodory's local
existence theorem. The proof follows the same general lines as the
well-known proof in \cite{CL}.
\begin{theorem} \label{cara}
Suppose hypotheses L1--L4 are satisfied. Let $0 < d^* < \min\{d-B,B-c\}$ and suppose $\beta \in (0,b)$ satisfies $\int_{t_0}^{t_0+\beta} h(s)\,ds < d^*$.
Then, the initial-value problem (\ref{locode}), (\ref{locic})
has a solution existing on the interval $[t_0, t_0 + \beta]$ and
satisfies
\begin{gather*}
A+cR(t) < x_1(t) < A+dR(t)\\
c < x_2(t) < d
\end{gather*}
for $t_0 < t \leq t_0 + \beta$.
\end{theorem}
\paragraph{Proof:} Choose a fixed integer $n >1$.
Let $h_n = \beta/n$ and let $t_k = t_0 + k h_n$ for $k = 1,2,\dots,n$.
Define
\[
u_{2,n} (t) = B, \quad \mbox{for} \quad t_0 \leq t \leq t_1 .
\]
Note that $B - d^* < u_{2,n} (t) < B + d^*$ for $t_0 \leq t \leq t_1$.
Also define
\[
u_{1,n} (t) = A + \int_{t_0}^t \frac{u_{2,n} (s)}{p(s)} \, ds,
\quad \mbox{for} \quad t_0 \leq t \leq t_1 .
\]
It follows that
\[(B-d^*) R(t) < u_{1,n} (t) - A < (B+d^*) R(t) \]
and so
\[
A + (B-d^*) R(t) < u_{1,n} (t) < A + (B+d^*) R(t).
\]
Thus, $(t,u_{1,n} (t),u_{2,n} (t)) \in S$ for $t_0 \leq t \leq t_1$.
We extend the pair $(u_{1,n} , u_{2,n} )$ to the entire interval $[t_0 ,t_0 + \beta]$ by recursively defining the pair on the subintervals
$[t_{j-1},t_j]$. Thus, for each $j=2,3,\dots,n$, we define
\begin{gather*}
u_{2,n} (t) = B - \int_{t_0}^{t-h_n} m(s) f(s,u_{1,n} (s),u_{2,n} (s)) \, ds,
\quad \mbox{for} \quad t_{j-1} \leq t \leq t_j ,\\
u_{1,n} (t) = A + \int_{t_0}^t \frac{u_{2,n} (s)}{p(s)} \, ds,
\quad \mbox{for} \quad t_{j-1} \leq t \leq t_j .
\end{gather*}
(The measurability of the integrand in the integral for $u_{2,n}$
follows from L3 by approximating with simple functions.)
Using L4, we have
\begin{align*}
|u_{2,n}(t)-B| &\leq \int_{t_0}^{t-h_n}m(s)|f(s,u_{1,n}(s),u_{2,n}(s))|\,ds \\
&\leq \int_{t_0}^{t_0 + \beta}h(s)\,ds < d^* ,
\end{align*}
and therefore, $B-d^* < u_{2,n}(t) < B + d^*$. Further,
\[
(B-d^*)R(t) < u_{1,n}(t) - A < (B+d^*)R(t),
\]
and so
\[
A + (B-d^*)R(t) < u_{1,n}(t) < A + (B+d^*)R(t).
\]
These inequalities show that $(t,u_{1,n} (t),u_{2,n} (t))$ remains
in $S$ on each subinterval and the recursive definition is allowed.
Moreover, the two sequences $\{u_{1,n}\}$, $\{u_{2,n}\}$ are uniformly
bounded on $t_0 \leq t \leq t_0 + \beta$.
We shall show that these sequences are equicontinuous so that Ascoli's theorem may be applied. Suppose $t_0 \leq t \leq t^* \leq t_0 + \beta$. Then
\[
|u_{1,n}(t) - u_{1,n}(t^*)| = \Big|\int_{t}^{t^*} \frac{u_{2,n}(s)}{p(s)}
\, ds \Big|
\leq \int_{t}^{t^*} \frac{Q}{p(s)} \, ds,
\]
where $Q = \max\{|B-d^*|,|B+d^*|\}$. Moreover,
\begin{align*}
|u_{2,n}(t) - u_{2,n}(t^*)|
&= \Big|\int_{t-h_n}^{t^*-h_n} m(s)f(s,u_{1,n}(s),u_{2,n}(s)) \, ds\Big| \\
&\leq \int_{t-h_n}^{t^*-h_n} h(s) \, ds.
\end{align*}
The desired equicontinuity follows from absolute continuity of the
integral. Using Ascoli's theorem, we may assume without loss of
generality that both sequences converge uniformly on $[t_0,t_0+\beta]$ to limit functions $u_1 (t)$, $u_2 (t)$. We may use the
Lebesgue dominated convergence theorem (for $u_{2,n}$ the dominating
function is $h(s)$; for $u_{1,n}$, the dominating function is $(p(s))^{-1}$) to take limits under each integral sign as $n \to \infty$ to show
that
\begin{gather*}
u_1 (t) = \int_{t_0}^t \frac{u_2 (s)}{p(s)} \, ds ,\\
u_2(t) = B - \int_{t_0}^{t} m(s)f(s,u_1(s),u_2(s)) \, ds ,
\end{gather*}
for $t_0 \leq t \leq t_0 + \beta$, from which we obtain
\begin{gather*}
u_2'(t) = -m(t)f(t,u_1(t),u_2(t)),\\
u_1'(t) = \frac{u_2(t)}{p(t)} ,
\end{gather*}
almost everywhere on $[t_0 ,t_0 + \beta]$. \medskip
The specific size of $\beta$ provided by the hypotheses of this last
theorem is crucial for our main proof in the next section.
\section{Proof of Main Theorem}
First note that the hypotheses M1--M4 imply that the earlier hypotheses
L1--L4 hold on any interval $(t_0,b)$ with $0 \leq t_0 < b < \infty$,
so we may apply Theorem \ref{cara} as needed.
From hypothesis M4, we have $m(s) g_{\alpha_0} (s) \in L^1 (0,\infty)$ if $\alpha_0 > 0$ is sufficiently small. Further, M4 implies that
$m(s) g_\alpha (s) \leq m(s) g_{\alpha_0} (s)$ whenever $0<\alpha<\alpha_0$
and also that $m(s) g_\alpha (s) \to 0$ as $\alpha \to 0$, for all $s > 0$.
Thus, by the Lebesgue Dominated Convergence Theorem,
\[
\int_{0}^{\infty}m(t)g_{\alpha}(t)dt \to 0 \quad
\mbox{as } \alpha \to 0 .
\]
Hence, there exists $\delta \in (0,\alpha_0 ]$ such that $0 < \alpha < \delta$ implies
\[
\int_{0}^{\infty}m(t)g_{\alpha}(t)dt < \frac{1}{4}.
\]
We shall show that $\gamma = \frac 12 \min\{D,\delta\}$, where $D$
is the number from our hypothesis M3,
satisfies the requirements of our theorem.
To apply Theorem \ref{cara}, we pick $0 < C \leq \gamma$,
$d^* =C/2$, $c = 0$, $d = 2C$, $t_0 = 0$, $b = 1$, $B = C$, and $A = 0$.
Note that $d^* = \frac{d}{4} < d - d/2 = d - C = d - B$ and
$d^* < C = B - c$. So $d^* < \min\{d-B,B-c\}$.
By absolute continuity of the integral, there exists
$\beta \in (0,b) = (0,1)$ so that for $k = 0,1,\dots$,
\begin{equation} \label{beta}
\int_{k \beta}^{(k+1)\beta} \alpha m(s)g_{\alpha}(s) \, ds < d^* .
\end{equation}
This last inequality, for $k=0$, allows us to
apply Theorem \ref{cara} to get a solution $y_1(t)$ on $[0,\beta]$ so that
for $0 < t \leq \beta$,
\begin{equation} \label{first}
0 < y_1(t) < 2Cr(t), \quad 0 < p(t)y'_1(t) < 2C\,.
\end{equation}
Integrating (\ref{ode}) from $0$ to $t$ and using M4, we obtain
\begin{align*}
|p(t)y_1'(t) - C| &\leq
\int_{0}^{t} m(s)|f(s,y_1(s),p(s)y_1'(s))| \, ds \\
&\leq 2C \int_{0}^{t} m(s) h(s,y_1(s),p(s)y_1'(s)) \, ds \\
&< 2C\int_{0}^{t} m(s)g_{2C}(s) \, ds < \frac{C}{2},
\end{align*}
if $t \in [0,\beta]$. Hence,
\[\frac{C}{2} < p(t)y_1'(t) < \frac{3C}{2}.\]
Then
\[ y_1(t) = \int_{0}^{t}\frac{p(s)y_1'(s)}{p(s)}ds \]
and so
\[ \frac{C}{2}r(t) < y_1(t) < \frac{3C}{2}r(t).
\]
We claim that for $k=2,3,\dots,$ there exists a solution $y_k(t)$
of (\ref{ode}) on the interval $(k-1)\beta \leq t \leq k \beta$ so that
\begin{gather*}
y_{k+1}(k\beta) = y_k(k\beta),\\
y_{k+1}'(k\beta) = y_k'(k\beta),
\end{gather*}
for $k \geq 1$, and
\begin{equation} \label{ineqC}
\begin{gathered}
\frac{C}{2}r(t) < y_k(t) < \frac{3C}{2}r(t), \\
\frac{C}{2} < p(t)y_k'(t) < \frac{3C}{2},
\end{gathered}
\end{equation}
for $(k-1)\beta \leq t \leq k\beta$.
Noting that $y_1 (t)$ has already been constructed, we continue by
induction and
assume that $y_1(t), y_2(t),\dots,y_n(t)$ have been constructed.
Next, we construct $y_{n+1} (t)$. To use Theorem \ref{cara},
we keep $C$, $d^*$, $c$, $d$, $b$, and $\beta$ as before and let
$t_0 = n\beta$, $A = A_n = y_n(n\beta)$, and
$B = B_n = p(n\beta)y_n'(n\beta)$. Inequality (\ref{beta}) for $k=n$
allows us to apply Theorem \ref{cara} to get a solution $y_{n+1}(t)$ of
(\ref{ode}) on $[t_0,t_0 + \beta] = [n\beta, (n+1)\beta]$ so that
$y_{n+1} (n \beta) = y_n (n \beta)$ and
\begin{gather*}
A_n \leq y_{n+1}(t) < A_n + 2C\int_{t_0}^{t} \frac{1}{p(s)}ds, \\
0 < p(t)y_{n+1}'(t) < 2C.
\end{gather*}
To complete the induction, we must verify that $y_{n+1} (t)$ satisfies
(\ref{ineqC}).
Define $y(t)$ for $0 \leq t \leq (n+1)\beta$ by $y(t) = y_k(t)$ for $(k-1)\beta \leq t \leq k\beta$.
Since
\[ \frac{C}{2} < p(s)y'(s) < \frac{3C}{2},
\quad \mbox{for } 0 \leq s \leq n\beta \]
and
\[ 0 < p(s)y'(s) < 2C, \quad \mbox{for } n\beta \leq s \leq (n+1)\beta ,\]
it follows that
\[ 0 < p(s)y'(s) < 2C \]
for the larger interval, $0 \leq s \leq (n+1)\beta$, and it follows
by integrating that
\[
0 < y(t) < 2Cr(t), \quad \mbox{for } 0 \leq t \leq (n+1)\beta.
\]
The calculation appearing just after (\ref{first}) may now be
repeated to show that
\[
\frac{C}{2} < p(t)y'(t) < \frac{3C}{2}, \quad \mbox{for }
0 \leq t \leq (n+1)\beta ,
\]
which implies, as before, that
\[
\frac{C}{2} r(t) < y(t) < \frac{3C}{2}r(t), \quad \mbox{for }
0 \leq t \leq (n+1)\beta.
\]
Finally, we define $y(t)$ for $0 \leq t < \infty$ by $y(t) = y_k(t)$
for $(k-1)\beta \leq t < k\beta$, and each $k = 1,2,\dots$.
Clearly, $y(t)$ is the desired solution.
To investigate the limit of $p(t)y'(t)$ at infinity, we examine
\begin{align*}
p(t)y'(t) &= p(t_0)y'(t_0) + \int_{t_0}^{t}(p(s)y'(s))'\,ds \\
&= p(t_0)y'(t_0) - \int_{t_0}^{\infty}
m(s)f(s,y(s),p(s)y'(s)){\mathcal X}_{[t_0,t]}(s) \, ds.
\end{align*}
Since $(m(s)f(s,y(s),p(s)y'(s)){\mathcal X}_{[t_0,t]}(s)