
\documentclass[reqno]{amsart}

\AtBeginDocument{{\noindent\small
{\em Electronic Journal of Differential Equations},
Vol. 2003(2003), No. 118, pp. 1--21.\newline
ISSN: 1072-6691. URL: http://ejde.math.txstate.edu or http://ejde.math.unt.edu
\newline ftp ejde.math.txstate.edu  (login: ftp)}
\thanks{\copyright 2003 Texas State University-San Marcos.}
\vspace{9mm}}

\begin{document}

\title[\hfilneg EJDE--2003/118\hfil
Existence and multiplicity of heteroclinic solutions]
{Existence and multiplicity of heteroclinic solutions for a
non-autonomous boundary eigenvalue problem}

\author[Luisa Malaguti \& Cristina Marcelli\hfil EJDE--2003/118\hfilneg]
{Luisa Malaguti \& Cristina Marcelli}  % in alphabetical order

\address{Luisa Malaguti\hfill\break
Department of Engineering Sciences and Methods \\
University of Modena and Reggio Emilia,
via Fogliani 1 - 42100 Reggio Emilia, Italy}
\email{malaguti.luisa@unimore.it}

\address{Cristina Marcelli \hfill\break
Department of Mathematical Sciences\\
Polytechnic University of Marche, via Brecce Bianche - 60131
Ancona, Italy} 
\email{marcelli@dipmat.univpm.it}


\date{}
\thanks{Submitted April 15, 2003. Published November 28, 2003.}
\subjclass[2000]{34B40, 34B18, 34C37}
\keywords{Boundary eigenvalue problems, positive bounded solutions,
\hfill\break\indent shooting method}


\begin{abstract}
 In this paper we  investigate the boundary eigenvalue problem
 \begin{gather*}
 x''-\beta(c,t,x)x'+g(t,x)=0 \\
 x(-\infty)=0, \quad x(+\infty)=1
 \end{gather*}
 depending on the real parameter $c$. We take $\beta$ continuous
 and positive and assume that $g$ is bounded and becomes active
 and positive only when $x$ exceeds a threshold value
 $\theta \in ]0,1[$. At the point $\theta$ we allow $g(t, \cdot)$
 to have a jump. Additional monotonicity properties are required,
 when needed. Our main discussion deals with the non-autonomous case.
 In this context we prove the existence of a continuum of values $c$
 for which this problem is solvable and we estimate the interval
 of such admissible values. In the autonomous case, we show its
 solvability for at most one  $c^*$. In the special case when
 $\beta$ reduces to $c+h(x)$ with $h$ continuous, we also give a
 non-existence result, for any real $c$. Our methods combine
 comparison-type arguments, both for first and second order
 dynamics, with a shooting technique. Some applications  of the
 obtained results are included.
\end{abstract}

\maketitle
\numberwithin{equation}{section}
\newtheorem{theorem}{Theorem}[section]
\newtheorem{lemma}[theorem]{Lemma}
\newtheorem{proposition}[theorem]{Proposition}
\newtheorem{corollary}[theorem]{Corollary}

\section{Introduction}

This paper concerns the boundary value problem
\begin{equation}
\begin{gathered}
x''-\beta(c,t,x)x'+g(t,x)=0 \\
x(-\infty)=0, \quad x(+\infty)=1
\end{gathered} \label{eq:P}
\end{equation}
depending on a real parameter $c$. Our  aim is to study the
solvability of \eqref{eq:P} when $c$ varies in a given open
interval $J\subset \mathbb{R}$.

We consider $\beta:J \times \mathbb{R}^2 \to \mathbb{R}$ continuous with
$\beta(\cdot,t,u)$ increasing for all $(t,u)\in \mathbb{R} \times
[0,1]$ and such that, setting
$$
m_c:=\inf_{(t,u)\in \mathbb{R} \times[0,1]}\beta(c,t,u) , \quad
M_c:=\sup_{(t,u)\in \mathbb{R} \times[0,1]}\beta(c,t,u)
$$ we have
\begin{equation}
0<m_c \le M_c < + \infty \quad \mbox{for all } c \in J.
\label{eq:limcomb}
\end{equation}
In several applications it is possible to choose the interval $J$
in such a way that \eqref{eq:limcomb} is satisfied. Moreover note
that the functions $m_c$, $M_c$ of the parameter $c\in J$ are
increasing, due to the monotonicity of $\beta(\cdot,t,x)$.

In the paper we are interested in models for which the
nonlinear term $g$ is active only for $x$ greater than a fixed
threshold value $\theta\in ]0,1[$. So we take $g: \mathbb{R}^2 \to
\mathbb{R}$ bounded, continuous in $\mathbb{R} \times [\theta, 1]$ and
satisfying the following conditions
\begin{equation}
\begin{gathered}
g(t,x)=0 \quad \mbox{whenever } x \in
]-\infty,\theta[\cup [1,+\infty[, \mbox{ for every } t \in \mathbb{R} \\
g(t,x) \ge \tilde g (x) >0 \quad \mbox{whenever } x \in
]\theta,1[, \mbox{ for every } t \in \mathbb{R}
\end{gathered}\label{eq:gcomb}
 \end{equation}
where $\tilde g\in
C^0([\theta,1])$ is a given function and $\tilde g(1)=0$.

The above hypotheses allow $g(t,\cdot)$ to have a jump at the
threshold value $\theta$. When this occurs, each solution of
\eqref{eq:P} will be of class $C^1(\mathbb{R})$ and twice
continuously differentiable on every interval where it does not
attain the value $\theta$.

In correspondence to the  values $x$ where $g$ is not active we
take $\beta$ increasing, that is we also require
\begin{equation}
\beta(c,t,\cdot) \quad \mbox{ increasing in } [0,\theta] \mbox{
for all } (c,t) \in J \times \mathbb{R}.  \label{eq:betamon}
\end{equation}

As a consequence of our assumptions, we prove that any solution
$x$ of \eqref{eq:P} necessarily satisfies $x'(t) \ge 0 $ for all
$t$. Therefore problem \eqref{eq:P} can be regarded as a search of
monotone heteroclinic connections between the stationary states
$x\equiv 0$ and $x\equiv 1$ of the differential equation in
\eqref{eq:P}. Since our discussion is mainly centered on the role
of the parameter $c$, this research can be viewed as an
investigation of a boundary eigenvalue problem.

There are several models, arising from different sciences, where
it is important to find a positive solution of a second order
dynamic satisfying suitable boundary conditions. This problem has
received a lot of attention in the last decades and we provide some
progress also in this direction. We refer, in particular, to
\cite{AORW} (Chapter 5) for the investigation of bounded positive
solutions in a half-line when the second order equation does not
depend on $x'$. Moreover we refer to \cite[Theorem 5.1]{Brno}
for an existence result of positive solutions on all $\mathbb{R}$ for
the differential equation in \eqref{eq:P}, but under different
assumptions on $g$.

Problem \eqref{eq:P} can also be seen as the investigation of
non-trivial (i.e. non-constant in $\xi$) stationary solutions of
the parabolic equation
\[
u_{\tau}=u_{\xi \xi}+cu_{\xi}+g(\tau,u), \quad \tau\ge 0, \  \xi
\in \mathbb{R}
\]
having limits $x(\pm \infty)$ at infinity. We refer to \cite {VS}
for such an analysis but under different conditions on the
non-linear term $g$.

Finally, note that by applying \cite[Theorem 4.3]{O'R}, which
requires very few regularity assumptions on $\beta$ and $g$, we
could be able to obtain, for each $c \in J$, the existence of a
solution $x$ of the differential equation in \eqref{eq:P}
satisfying $0 \le x(t) \le 1$ for all $t \in \mathbb{R}$. However,
under the assumptions \eqref{eq:gcomb} on $g$, this is not enough
to guarantee the asymptotic properties of $x$ at $\pm \infty$
required in \eqref{eq:P}.

Under suitable constraints on the values of $M_c$, $m_c$ in the
interval $J$, in this paper we show the existence of a range of
values of $c$ for which \eqref{eq:P} is solvable. More accurate
conclusions follow with additional monotonicity properties.

Throughout the paper we denote
\begin{equation}
g_1(x):=  {\inf_{t \in \mathbb{R}}} g(t,x) \quad \mbox{ and }
\quad g_2(x):= \sup_{t \in \mathbb{R}} \ g(t,x). \label{eq:star}
\end{equation}
Of course, $g_1, g_2$ are bounded with $g_1(x)=g_2(x)=0$ in
$]-\infty,\theta[\cup [1,+\infty[$, $g_2(x) \ge g_1(x) \ge \tilde
g(x)> 0$ in $]\theta,1[$.

Our main result is the following.

\begin{theorem} \label{thm1.1}
Let $\beta: J \times \mathbb{R} \times [0,1] \to \mathbb{R}$ be a continuous function,
satisfying conditions \eqref{eq:limcomb} and  \eqref{eq:betamon}.
Let $g:\mathbb{R}^2 \to\mathbb{R}$ be a bounded function, continuous in
$\mathbb{R} \times [\theta,1]$, satisfying condition \eqref{eq:gcomb}. Assume
\begin{gather}
\lim_{c \to \inf J} M_c < \sqrt{2\int_{\theta}^1 g_1(s)\,ds}
\label{eq:thm1.1-1} \\
\lim_{c \to \sup J} m_c > \frac{1}{\theta}\sqrt{2\int_{\theta}^1
g_2(s)\, ds}. \label{eq:thm1.1-2}
\end{gather}
Then, for all $\tau \in \mathbb{R}$ there exists a non-empty set
$C_\tau \subset J$ of values of the parameter $c$ for which
problem \eqref{eq:P} has a solution $x_{\tau}$ satisfying
$x_{\tau}(\tau)=\theta$. Moreover, if we further assume
\begin{equation}
\begin{array}{ll}
(i) \quad &\beta(\cdot, t,x) \mbox{ strictly increasing for all }(t,x) \in \mathbb{R} \times [0,1],\\
(ii) \quad &\beta(c, \cdot, x) \mbox{ decreasing for each }
(c,x)\in J \times [0,1] \\
(iii) \quad & g(\cdot, x) \mbox{ increasing for all } x\in [0,1]
\end{array} \label{eq:MON}
\end{equation}
then the set $C_{\tau}$ contains a unique element $c_\tau$, for all
$\tau \in \mathbb{R}$.
\end{theorem}


This theorem can be compared to related recent papers. First, we
refer to \cite{DP}, \cite{DR} and \cite{TR} for boundary
eigenvalue theories developed in different contexts. In
particular, in \cite{TR} $\beta(c,t,x)=cq(t)$ where $t$ varies in
a compact interval and the boundary conditions depend polynomially
on the spectral parameter $c$. In \cite{DP} and \cite{DR} the
nonlinear eigenvalue problem for the generalized p-Laplacian
equation $-\mbox{div}(a(t)\vert \nabla x\vert ^{p-2}\nabla x
)=cf(t,x)$ is considered in an unbounded domain, with $x \in
\mathbb{R} ^ n$, $p>1$ and $c>0$. Notice the non-variational
nature of the differential equation appearing in \eqref{eq:P}.

Even when $C_\tau$ is a singleton, say $C_\tau =\{c_\tau\}$, the
values of the parameter $c_\tau$ corresponding to different times
$\tau$, are in general distinct, that is there is a range of
values of the parameter $c$ for which \eqref{eq:P} is solvable, as
the following result states.

\begin{theorem} \label{thm1.2}
Let all the assumptions of Theorem \ref{thm1.1} be valid and take in addition
$\beta(c, \cdot, x)$ strictly
decreasing in $t$ for all $(c, x)\in J\times [0,1]$. Then the map
$\tau \longmapsto c_{\tau}$ is an injective continuous function
from $\mathbb{R}$ to $J$. Moreover, the image set $C:=\{ c_\tau: \tau
\in \mathbb{R} \}$ is a bounded open interval, with $\inf C > \inf J$.
\end{theorem}

The existence of a range of values of the
parameter $c$ is typical of the non-autonomous case. When the
problem is autonomous, that is $\beta(c,t,x)=\beta(c,x)$ and
$g(t,x)=g(x)$, assumptions (\ref{eq:MON} {\em ii-iii})  are
trivially fulfilled. Hence, if all the other conditions of Theorem
\ref{thm1.1} hold,  there exists a unique $c=c^*$ for which problem
\eqref{eq:P} is solvable. In fact, by Theorem \ref{thm1.1} we know that the
set $C_\tau $ contains the unique element $c_\tau$. Moreover,
 if $x_\tau$ is a solution for $c=c_\tau$
satisfying $x_\tau(\tau)=\theta$, then for every $\tau\ne \tau'$
the shifted function $x_{\tau'}(t):=x_\tau(t+\tau-\tau')$ is a
solution of \eqref{eq:P}, again for $c=c_\tau$, satisfying
$x_{\tau'}(\tau')=\theta$. Hence, in the autonomous case we
necessarily have $c_\tau=c_{\tau'}=c^*$ for every $\tau,\tau'\in
\mathbb{R}$. In addition  it is possible to prove that the solution of
\eqref{eq:P} corresponding to $c=c^*$ is unique, up to a
time-shift.

The consequences of Theorem \ref{thm1.1} for autonomous problems are
summarized by the following result.

\begin{corollary} \label{coro1.3}
Take $\beta=\beta(c,x)$ and $g=g(x)$ satisfying all the assumptions of
Theorem \ref{thm1.1}, with $\beta(\cdot,x)$ strictly increasing. Then,
there is a unique $c^* \in J$ such that \eqref{eq:P} is solvable
and the solution is unique up to a time-shift.
\end{corollary}

Typical examples of autonomous functions $\beta$ in our analysis
are
\begin{itemize}
\item[(a)] $\beta(c,t,x)=c+ h(x)$
\item[(b)] $\beta(c,t,x)=ck(x)$
\end{itemize}
where $h$ and $k$ denote real continuous functions.

The case (a) appears in the investigation of front-type solutions
with wave speed $c$, for reaction-diffusion equations with
convective effects, that is equations of the type
\begin{equation}
 {\frac{\partial u}{\partial t}+\frac{\partial
H(u)}{\partial x}=\frac{\partial^2 u}{\partial x^2} +g(u)} \quad
t \ge 0, x \in \mathbb{R}
\label{eq:rd}
\end{equation}
where $h(u)= \frac{dH}{du}$ denotes the convective speed. We
recall that a solution $u(t,x)$ of (\ref{eq:rd}) is said to be a
travelling wave (or front-type) solution (see e.g. \cite{F})
whenever there exist a function $v \in C^2(\mathbb{R})$ and a real
constant $c$ satisfying $u(t,x)=v(x+ct)$ for all $t \ge 0$ and $x
\in \mathbb{R}$. This problem, when the function $H$ is constant,
was extensively studied mainly in combustion and population
genetics models (see e.g. \cite{AW}, \cite{BNS} and \cite{F}). In
particular, in the special case when  $g$ is lipschitzian in
$[\theta, 1]$, Berestycki-Nicolaenko-Scheurer (see \cite{BNS})
proved the existence of a unique positive $c^*$ for which
\eqref{eq:P} is solvable. Note that in this case we have
$m_c=M_c=c$ and we can choose $J=]0,+\infty[$ in such a way that
(\ref{eq:thm1.1-1}) and (\ref{eq:thm1.1-2}) are trivially
satisfied. Hence, our result can be seen as a generalization of
the one  in \cite[Theorem 3.1]{BNS}.

On the other hand, few results are available up to now regarding
equation (\ref{eq:rd}) with a non-constant convective effect $H$;
see to this purpose \cite {Murray} for linear convective terms,
\cite {MathNach} and \cite {MI} for the nonlinear case. This
situation presents an interesting dynamic, since the presence of a
convective effect may cause the disappearance  of front-type
solutions.

More in detail, observe that condition \eqref{eq:betamon} is
satisfied whenever $h(x)$ is increasing in $[0,\theta]$. Moreover,
(\ref{eq:thm1.1-2}) is trivially fulfilled if $J$ is unbounded,
while (\ref{eq:thm1.1-1}) holds whenever
\begin{equation}  M - m <
\sqrt{2 \int_\theta^1 g_1(s) ds} \label{eq:lim}
\end{equation}
where $M:= \max_{x \in [0,1]} h(x)$ and $m:= \min_{x
\in [0,1]} h(x)$. In fact, in order to have $m_c = c + m>0$ for
every $c \in J$ we should take $J=] -m, +\infty [ $, so that
$\lim_{c \to \inf J} M_c = M - m$ \ and $\lim_{c \to
\sup J} m_c = +\infty$.

Condition (\ref{eq:lim}) is essentially a constraint on the growth
of $h(u)$. We remark that if $h(u)$ grows too much on $[0,1]$, it
may happen that the boundary value problem \eqref{eq:P} has no
solutions for any value of the parameter $c$, that is the
reaction-diffusion equation (\ref{eq:rd}) does not admit
travelling wave solutions, even when the convective speed $h$ is
linear. In fact, the following non-existence result holds.


\begin{theorem} \label{thm1.4}
Let $\beta(c,t,x):= c + h(x)$ where $h$ is
a continuous function,  and let $g$ be as in Theorem \ref{thm1.1}. Assume
\begin{equation}
\int_0^\theta h(s) ds - \theta h(0) \ge \sqrt{2\int_\theta^1
g_2(s)ds} .  \label{eq:nonex}
\end{equation}
Then, problem \eqref{eq:P} has no solution, whatever the value $c
\in \mathbb{R}$ may be.
\end{theorem}

On the other hand, when (\ref{eq:lim}) holds, we prove the
following existence result, which also provides an estimate for
the value $c^*$.

\begin{corollary} \label{coro1.5}
 Let $\beta(c,t,x):= c + h(x)$ where $h$
is a continuous function. If (\ref{eq:lim}) holds, then there
exists a unique value $c^*$ for which problem \eqref{eq:P} is
solvable and we have
\begin{multline}
\sqrt{2 \int_\theta^1 g_1(s) ds} - \int_0^\theta  h(s) ds
- (1 -\theta) M  \\
<  c^* < \frac{1}{\theta}\Bigg[ \sqrt{2\int_\theta^1 g_2(s)ds} -
\int_0^\theta h(s) ds \Bigg]. \label{eq:estim}
\end{multline}
\end{corollary}


In the particular case $h(x)\equiv 0$, the previous estimate
becomes
$$
\sqrt{2\int_{\theta}^ 1 g_1(s)\, ds } \le c \le \frac {1}{\theta}
\sqrt{2 \int_{\theta}^{1} g_2(s)\, ds } \quad \mbox{for every } c
\in C_\tau \mbox{ and every } \tau \in \mathbb{R}.
$$

Our approach consists in reducing the problem \eqref{eq:P} to an
equivalent one on the half line $[0,+\infty[$, which is tackled by
a shooting technique.

More in detail, if $x$ is a solution of \eqref{eq:P} for a given
$c$ in $J$, we denote $\tau_x:=\min \{ t : x(t)=\theta\}$. Due to
the boundary condition $x(-\infty)=0$, the value $\tau_x$ is
well-defined and $x(t)<\theta$ for every $t< \tau_x$. Note that in
the special case when  $\beta(c,t,u)\equiv c$, every  solution $x$
satisfies $x'' -cx'=0$ for all $t\le \tau_x$. Hence, as it is easy
to see, the boundary condition $x(-\infty)=0$ is equivalent to the
tangential condition $x'(\tau_x)=c\theta$ at the point $t=\tau_x$.
Also in our general setting we replace the boundary condition at
$-\infty$ with a suitable tangential one at $t=\tau_x$. This is
possible (see Section 2, Theorem \ref{thm2.4}) for example in the case when
the trajectories having at $t=\tau_x$ different slopes do not
intersect each other in the negative half-line. Lemma \ref{lm2.2} shows
that such a behavior is guaranteed by condition
\eqref{eq:betamon}.

 In Section 3 we prove some asymptotic properties of the solutions.
We then combine these results with a shooting method developed in
Section 4 for studying the asymptotic behavior when $t \to
+\infty$. Section 5 is devoted to a relative compactness result
for families of solutions. The proofs of all these results are
presented in Section 6. This section also contains an example of
an autonomous problem of the type (a) with a linear function
$h(x)=kx$, for which the solvability and the non-solvability
depend on the value of  the slope $k$.


\section{A comparison type approach for negative times}

The first part of this study is devoted to the investigation of
the behavior of the solutions for negative times, restricting our
study to those solutions $x$ for which
$\tau_x:=\min\{t :x(t)=\theta\}=0$, that is from now on we investigate
solutions $x$ of the terminal value problem
\begin{equation}
\begin{gathered} x''-\beta(c, t, x)x'=0 \quad \mbox{for } t \le 0 \\
 x(0)=\theta. \end{gathered} \label{eq:tmeno}
\end{equation}

The following preliminary results concern properties of the
solutions of (\ref{eq:tmeno}) which will be used to replace the
boundary condition $x(-\infty)=0$ with a tangential one at $t=0$.


\begin{lemma} \label{lm2.1}
Given $c \in J$, let $x$ be a non-constant solution of (\ref{eq:tmeno}).
Then $x'(t)\ne 0$, for all $t\le 0$.
\end{lemma}

\begin{proof}
First observe that if $x'(0)=0$ then $x$
is constant and equal to $\theta$ in $]-\infty,0]$. In fact, if
$x'(t_0)\ne 0$ for some $t_0<0$, put $t_1:=\sup \{ \tau \le 0:
x'(t) \ne 0 \mbox{ for every } t \in [t_0,\tau]\}$, of course
$x'(t_1)=0$. Since \ $ \frac{x''(t)}{x'(t)}=\beta(c, t, x(t)) $ in
$[t_0,t_1[$, we deduce
\[
x'(t_1)= x'(t_0)\mbox{e}^{\int_{t_0}^{t_1} \beta(c, s, x(s))\,
ds}\ne 0 ,
\]
a contradiction. Hence, if $x$ is not constant, we have $x'(0)\ne
0$. Therefore, put $\tau_0:= \inf\{ \tau < 0 : x'(t) \ne 0 \mbox{
for every } t \in ]\tau,0]\}$,  for every $t\in ]\tau_0,0[$ we
have  $x'(t) = x'(0)\mbox{e}^{-\int_t^0 \beta(c, s, x(s))\, ds}$,
hence $\tau_0=-\infty$.
\end{proof}


We are now able to show that, under condition \eqref{eq:betamon},
the solutions of (\ref{eq:tmeno}) having different positive slopes
at $t=0$, do not intersect each other on the negative half-line.

\begin{lemma} \label{lm2.2}
Assume conditions \eqref{eq:limcomb} and \eqref{eq:betamon}.
Given $c \in J$, let $x_1$ and $x_2$ be
solutions of \eqref{eq:tmeno}. Then, if  $x_1'(0)>x_2'(0)>0$, we have
\[
x_1(t)<x_2(t) \quad \mbox{for all } t <0.
\]
\end{lemma}

\begin{proof}  Given $c\in J$, take $\tau < 0$
satisfying $x_1(t) < x_2(t)< \theta $ for each $t \in ] \tau, 0[$.
Such a value $\tau$ exists due to the tangential conditions at
$t=0$ and Lemma \ref{lm2.1}. Assumption \eqref{eq:betamon} then implies
$$
\mbox{e}^{-\int_t^0 \beta(c,s,x_1(s))\, ds} \ge
\mbox{e}^{-\int_t^0 \beta(c,s,x_2(s))\, ds}
$$
for any $t \in [ \tau, 0]$ and this yields
$$
x_1' (t)=x_1' (0)\mbox{e}^{-\int_t^0 \beta(c,s,x_1(s))\, ds}
> x_2'(0)\mbox{e}^{-\int_t^0 \beta(c,s,x_2(s))\, ds}=x_2'(t).
$$
Hence $x_1(\tau)< x_2(\tau)$.
\end{proof}

For the sake of completeness we recall now the comparison type result
that we shall employ. Let $I$ be a real interval and denote by
$I^0$ its interior. Given a continuous function $f: I\times \mathbb{R}^
2 \to \mathbb{R} $, consider the second order equation
\begin{equation}
x''=f(t,x,x').
\label{eq:schrader}
\end{equation}
We shall say that a function $\varphi \in C^0(I) \cap C^2(I^0)$ is
a lower solution  of  (\ref{eq:schrader}) on $I$ if $\varphi''(t)
\ge f(t, \varphi(t), \varphi'(t))$ for all $t \in I^0$. In a
similar way a function $\psi \in C^0(I) \cap C^2(I^0)$ satisfying
$\psi''(t) \le f(t, \psi(t), \psi'(t))$ for all $t \in I^0$ will
be called an upper  solution for (\ref{eq:schrader}) on $I$ (see
e.g. \cite {S}). The following result, which is a slightly
modified version of Theorem 4.1 in  \cite{S}, holds.

\begin{proposition} \label{prop2.3}
Let $\varphi, \psi \in C^1(]-\infty, 0])\cap C^2(]-\infty, 0[)$ be respectively
lower and upper solutions for  \eqref{eq:schrader}, with
$\varphi(t)\le \psi(t)$ for all $t\in ]-\infty, 0]$. Assume that for every
compact subinterval $I$ of $]-\infty,0]$ there exist two positive
continuous functions $h$ and $k$, defined for $s\ge 0$, satisfying
\[
\int_0^{+\infty} \frac{s}{h(s)}ds= \int_0^{+\infty}
\frac{s}{k(s)}ds =+\infty
\]
such that
\begin{equation}
\begin{gathered}
 f(t,x,y) \le k(y) \quad \mbox{whenever } y \ge 0, t\in I \mbox {
and } \varphi(t)\le x \le \psi(t)\\
f(t,x,y) \ge -h(-y)  \quad \mbox{ whenever } y \le 0, t\in I \mbox {
and } \varphi(t)\le x \le \psi(t). \end{gathered}
\label{eq:upp-low}
\end{equation}
Then, for every $\alpha\in [\varphi(0),\psi(0)]$ equation
\eqref{eq:schrader} admits a solution $x$ such that $x(0)=\alpha$
and $\varphi(t) \le x(t) \le \psi(t)$ for $t<0$.
\end{proposition}

\begin{proof} For $(t,x,y) \in [0, +\infty[\times \mathbb{R} ^2$ define
$f_1(t,x,y):=f(-t, x, -y )$. For $t\ge 0$, define
moreover $\varphi_1(t):=\varphi(-t)$ and $\psi_1(t):=\psi(-t)$.
Then $\varphi_1$ and $\psi_1$ are respectively a lower and an
upper solution of $x''=f_1(t,x,x')$ on $[0, +\infty[$ satisfying
$\varphi_1(t)\le \psi_1(t)$ for all $t \ge 0$. Finally, according
to assumption (\ref{eq:upp-low}),  the following growth conditions
hold on each compact interval $I$ contained in $[0,+\infty[$,
\begin{gather*}
 f_1(t,x,y)\ge -h(y) \quad \mbox{whenever } y
\ge 0, t\in I \mbox{ and } \varphi_1(t)\le x \le \psi_1(t) \\
f_1(t,x,y)\le k(-y) \quad\mbox{whenever } y \le 0, t\in I \mbox{ and }
\varphi_1(t)\le x \le \psi_1(t).
\end{gather*}
Hence \cite[Theorem 4.1]{S} can be applied and for all $\alpha \in
[\varphi_1(0), \psi_1(0)]$ a solution $x_1$ of $x''=f_1(t,x,x')$
exists on $[0, +\infty[$ satisfying $x_1(0)=\alpha$ and
$\varphi_1(t) \le x_1(t) \le \psi_1(t)$ for $t \ge 0$. As it is
easy to see, the function $x(t):=x_1(-t)$ is a solution  of
(\ref{eq:schrader}) with the required properties.
\end{proof}

We are now able to state our main result concerning the behavior
of the solutions of \eqref{eq:P} in the negative half-line.

\begin{theorem} \label{thm2.4}
Assume \eqref{eq:limcomb} and \eqref{eq:betamon}. Then for all $c
\in J$ the following boundary value problem on $]-\infty, 0]$
\begin{equation}
\begin{gathered}
x''-\beta(c,t,x)x' =0\\
x(0)=\theta, \quad x(-\infty)=0
\end{gathered} \label{eq:Pmeno}
\end{equation}
is solvable. Moreover, all the solutions of (\ref{eq:Pmeno}) have
the same slope $\lambda=\lambda(c) $ at $t=0$, which is a
continuous increasing function of the parameter $c$, satisfying
$\theta m_c \le \lambda(c) \le\theta M_c$.
\end{theorem}

\begin{proof} {\it i) \ Solvability of
(\ref{eq:Pmeno}).}\ Given $c \in J$, consider the functions
$\varphi(t):=\theta\mbox{e}^{M_ct}$ and $\psi(t):=\theta
\mbox{e}^{m_c t}$ defined for $t\le 0$. According to
\eqref{eq:limcomb}, it is easy to see that $\varphi$ and $\psi$
are respectively a lower and an upper solution of problem
(\ref{eq:tmeno}) in $]-\infty, 0]$, satisfying
\[
\varphi(t) \le \psi(t)\  \mbox{ for all } t < 0 \quad \mbox {and}
\quad \varphi(0) = \psi(0)= \theta.
\]
In addition, again by \eqref{eq:limcomb}, it follows that
\begin{gather*}
\beta(c,t,x)y \le M_cy \quad \mbox{for } y
\ge 0 \mbox{ and } (t,x) \in ]-\infty, 0] \times \mathbb{R} \\
\beta(c,t,x)y \ge -M_c(-y) \quad \mbox{for } y \le 0 \mbox{ and }
(t,x) \in ]-\infty, 0] \times \mathbb{R}.
\end{gather*}
Therefore, assumption (\ref{eq:upp-low}) of Proposition \ref{prop2.3} is
satisfied taking $ h(y)=k(y):=M_cy$. Hence, a solution $x(t)$ of
(\ref{eq:tmeno}) exists on $]-\infty, 0]$ such that
\begin{equation}
\theta \mbox{e}^{M_ct} \le x(t) \le \theta \mbox{e}^{m_ct} \quad
\mbox{for all } t\le 0. \label{eq:xtmeno}
\end{equation}
This implies, in particular, $x(0)=\theta$ and $x(-\infty)=0$.

\noindent {\it ii)\ Uniqueness of $\lambda(c)$.} \ Given $c \in
J$, let $x(t)$ be a solution of (\ref{eq:Pmeno}). Taking Lemma \ref{lm2.1}
into account, we have $ x'(t)>0$ for every $t \le 0$. Since $
\frac{x''(t)}{x'(t)}=\beta(c,t,x(t))$ for $t\le 0$, we obtain
\[
x(-\infty)=\theta -x'(0)\int_{-\infty} ^ 0 {e}^{-\int_s^0
\beta(c,\sigma,x(\sigma))\, d\sigma }\, ds.
\]
Consider now an initial positive slope $\eta< x'(0)$ and let $y$
be a solution of the Cauchy problem
\begin{gather*}
y''-\beta(c,t,y)y' =0\\
y(0)=\theta, \quad y'(0)=\eta.
\end{gather*}
According to Lemma \ref{lm2.1} we have $y'(t)>0$ for all $t<0$. On the
other hand, Lemma \ref{lm2.2} ensures $y(t)>x(t)$ for all $t<0$. As a
consequence of the monotonicity property \eqref{eq:betamon} of
$\beta$ with respect to $x$, we have
\[
\int_{-\infty} ^ 0 {e}^{-\int_s^0 \beta(c,\sigma,y(\sigma))\,
d\sigma }\, ds \le \int_{-\infty} ^ 0 {e}^{-\int_s^0
\beta(c,\sigma,x(\sigma))\, d\sigma }\, ds,
\]
implying
\[
y(-\infty)= \theta -\eta \int_{-\infty} ^ 0 {e}^{-\int_s^0
\beta(c,\sigma,y(\sigma))\, d\sigma }\, ds > x(-\infty)=0.
\]
Similarly, if instead $y'(0)> x'(0)$, it follows that $y(-\infty)<0$.
Hence, the boundary condition $x(-\infty)=0$ implies a unique
tangential condition at $t=0$, which only depends on the parameter
$c$.

\noindent {\it iii) Monotonicity of $\lambda(c)$.}\ Given
$c_1<c_2 $, consider a solution $x_1(t)$ of the boundary value
problem
\begin{gather*}
x''-\beta(c_1,t,x)x' =0\\
x(0)=\theta, \quad x(-\infty)=0
\end{gather*}
lying between the functions $\theta \mbox{e}^{M_{c_1}t}$ and
$\theta \mbox{e}^{m_{c_1}t}$; such a solution exists by the proof
of part $i)$.
 According to the monotonicity of $\beta$ with respect to $c$ and since
$x_1'(t)>0$ for all $t\le 0$ (see {\it ii)}), we
have $x_1''=\beta(c_1,t,x_1(t))x_1'(t) \le
\beta(c_2,t,x_1(t))x_1'(t)$. Hence $x_1$ is an upper solution of
the equation $x''-\beta(c_2,t,x)x'=0$ on $]-\infty,0]$. On the
other hand, recall that $\theta\mbox{e}^{M_{c_2}t}$ is a lower
solution of the same equation in $]-\infty, 0]$ satisfying
\[
\theta\mbox{e}^{M_{c_2}t}\le \theta\mbox{e}^{M_{c_1}t} \le x_1(t) \quad \mbox{for } t\le 0.
\]
Hence, by applying Proposition \ref{prop2.3}, the equation
$x''-\beta(c_2,t,x)x'=0$ admits a solution $x_2(t)$ satisfying
$x_2(0)=\theta$  and $\theta\mbox{e}^{M_{c_2}t}\le x_2(t)\le
x_1(t)$ for all $t \le 0$, in particular $x_2(-\infty)=0$. Since
$x_2'(0)\ge x_1'(0)$, by the uniqueness of $\lambda (c_2)$ we have
$x_2'(0)=\lambda(c_2)$ implying
\[
\lambda(c_1) \le \lambda(c_2).
\]

\noindent {\it iv) \ Continuity of $\lambda(c)$.}\ Fixed $c_0\in
J$, let $(c_n)_n$ be a monotone sequence of values in $J$
converging to $c_0$ as $n \to +\infty$. Let $(x_n)_n$ be a
corresponding sequence of solutions of the boundary value problems
\begin{equation}
\begin{gathered}
x''-\beta(c_n,t,x)x' =0\\
x(0)=\theta, \quad x(-\infty)=0
\end{gathered} \label{eq:xmenocn}
\end{equation}
satisfying  $\theta e^{M_{c_n}t} \le x_n(t) \le \theta
e^{m_{c_n}t}$ for all $t \le 0$.
 According to {\it i)} such solutions exist and by {\it
ii)} they satisfy $x_n'(0)=\lambda(c_n)$. Moreover we have
$0<x_n(t) \le \theta$ for all $t\le 0$ and from Lemma \ref{lm2.1} we
deduce
\[
x_n'(t)>0 \quad \mbox{for all } t \le 0 \mbox{ and } n\in \mathbb{N},
\]
implying $x_n''(t)=\beta(c_n, t, x_n(t))x_n'(t)>0$; hence
\[
0<x_n'(t)\le x_n'(0)=\lambda(c_n)  \quad \mbox{for } t \le 0 \mbox{ and } n\in \mathbb{N}.
\]
Let $\bar c:= \sup_{n \in \mathbb{N}} c_n$. According to
the monotonicity of both $\lambda$ and $\beta$, we obtain $0 \le
x_n''(t)\le M_{c_n}x_n'(t)\le M_{\bar c }\lambda(\bar c)$.
Therefore, the set $(x_n)_n$ is relatively compact in the
Fr\'echet space $C^1(]-\infty, 0])$. It is then possible to
extract a subsequence $(x_{n_k})_k$, which converges to a function
$x \in C^1(]-\infty, 0])$ uniformly on the compact subsets of
$]-\infty, 0]$ and such that also $(x_{n_k}')_k$ uniformly
converges to $x'$ on the compact subsets of $]-\infty, 0]$.
Consequently, $x$ is a solution of $x''-\beta(c_0,t,x)x'=0$ on
$]-\infty, 0]$. Moreover, note that $x_n(t) \le \theta
e^{m_{\tilde c}t}$ for all $t \le 0$, where $\tilde c :=\inf c_n$.
Hence, $x(-\infty)=0$ and then,
 by the uniqueness of
$\lambda(c_0)$, we get $x'(0)=\lambda(c_0)$. Therefore,
$\lambda(c_{n_k})=x_{n_k}'(0) \to x'(0)= \lambda(c_0)$ when $k \to
+\infty$. Taking  the monotonicity of function $\lambda$ into
account, this implies that $\lambda(c_0^+) =\lambda
(c_0^-)=\lambda(c_0)$.
\end{proof}

\section{Some asymptotic properties}

This part deals with the asymptotic behavior of the solutions of
the second order differential equation
\begin{equation}
x'' -\beta(c,t,x)x' + g(t,x)=0 \label{eq:eq}
\end{equation}
subject to conditions  \eqref{eq:limcomb} and \eqref{eq:gcomb}. We
shall need such properties in the next section for developing our
shooting method.

Since the solutions we are looking for take values in $[0,1]$, the
behavior of function $\beta(c,t,\cdot)$ outside the interval
$[0,1]$ is not relevant for the aims of this investigation; so, we
can assume, without loss of generality,
\begin{equation}
 \beta(c,t,x)= \begin{cases}
\beta(c,t,1) \quad \mbox{for }x \ge 1 \\
\beta(c,t,0) \quad \mbox{for }x \le 0.
\end{cases}
\label{eq:betatutto}
\end{equation}

\begin{lemma} \label{lm3.1}
Given $c \in J$, let $x$ be a
solution of \eqref{eq:eq} subject to conditions \eqref{eq:limcomb}
and \eqref{eq:gcomb}. Assume that $x'(t) \ge 0$ [or $x'(t)\le 0$]
for each sufficiently large $t$. Then there exists $x'(+\infty)$.
\end{lemma}

\begin{proof}
For some fixed $c \in J$, let $x$ be a
solution of \eqref{eq:eq} and assume there exists $t_0$ such that
$x'(t) \ge 0$ for all $t \ge t_0$. For $\xi, t \in \mathbb{R} $, define
the functions
\[
G_2(\xi) :=\int_0^{\xi} g_2(s)\, ds \quad \mbox{and} \quad
H_2(t):= \frac12 {x'}^2(t) + G_2(x(t))
\]
with the function $g_2$ defined by (\ref{eq:star}). Since
\begin{align*}
H_2'(t) &=x'(t)x''(t) + g_2(x(t))x'(t)\\
&= x'(t)\bigl[\beta(c, t,x(t))x'(t) - g(t,x(t))
 + g_2(x(t))\bigr] \\
&= \beta(c, t,x(t)){x'}^2(t) +
 \bigl[g_2(x(t)) - g(t, x(t))\bigr]x'(t),
\end{align*}
we have  $H_2'(t)\ge 0$ for all $t \ge t_0$. Hence, there exists $
{\lim_{t \to +\infty}}H_2(t) \in [0, +\infty]$. On the other hand,
since $x'(t) \ge 0$ for all $t \ge t_0$, there exists also \ $ {
\lim_{t \to +\infty}} x(t) \in \mathbb{R} \cup \{+\infty\}.$
Therefore, since the function $G_2(\xi)$ is bounded and
increasing, there exists finite
 $ {\lim_{t \to +\infty}}G_2(x(t)) $. Consequently, also
 $  { \lim_{t \to +\infty}}x'(t) $ exists in
$[0,+\infty]$.

The case when $x'(t)\le 0$ for each $t$ sufficiently large can be
treated in a similar way, introducing the functions
\[
G_1(\xi) :=\int_0^{\xi} g_1(s)\, ds \quad \mbox{and} \quad
H_1(t):= \frac12 {x'}^2(t) + G_1(x(t))
\]
with $g_1$ defined by (\ref{eq:star}). Also in this case, it is
easy to show that $ H_1'(t) \ge 0$ for all $t \ge t_0$ and this
easily leads to the conclusion.
\end{proof}


\begin{lemma} \label{lm3.2}
Fix $c \in J$ and let $x$ be a solution of \eqref{eq:eq} subject
to conditions  \eqref{eq:limcomb} and \eqref{eq:gcomb}. Then:
\begin{itemize}
\item[(i)]
 If $x(t_0)\ge 1$ and $x'(t_0)>0$ for some $t_0$, then
 $x(+\infty)=+\infty$ and $x'(t)>0$ for every $t>t_0$.

\item[(ii)] If $x(t_0)\le \theta$ and $x'(t_0)<0$ for
some $t_0$, then $x(+\infty)=-\infty$ and $x'(t)<0$ for every
$t>t_0$.
\end{itemize}
\end{lemma}


\begin{proof} Fix $c \in J$.

\noindent (i)\  According to \eqref{eq:gcomb} and
(\ref{eq:betatutto}), we have
\[
x''(t_0)= \beta(c, t_0, x(t_0))x'(t_0)= \beta(c, t_0, 1)x'(t_0)>0.
\]
Take $\bar t >t_0$ such that $x''(t)>0 $ for all $t \in [t_0, \bar
t[$. Then $x'(t) >x'(t_0)$, implying $x(t) >1$ and
\[
x''(t)=\beta(c, t, 1)x'(t) \ge m_c x'(t_0) >0 \quad \mbox{ in }
[t_0,\bar t[
\]
so the same relation holds also for $x''(\bar t)$. Hence,
$x''(t)>0$ for all $t  \ge t_0$ and this yields $x'(t)\ge x'(t_0)>0$
and $x(+\infty)=+\infty$.

\noindent (ii)\ Since  $ x''(t_0)= \beta(c, t_0,x(t_0))x'(t_0) < 0 $,
reasoning as in (i), one finds
$x''(t)<0$ for all $t >t_0$. This implies $x'(t) < x'(t_0)< 0$ and
$x(+\infty)=-\infty$.
 \end{proof}


\begin{lemma} \label{lm3.3}
Given $c \in J$, let $x$ be a
solution of \eqref{eq:eq} subject to conditions \eqref{eq:limcomb}
and  \eqref{eq:gcomb}. If there exists $t_0 \ge 0$ such that
$x'(t_0)= 0$ and $\theta <x(t_0)<1$, then  $x'(t)<0$ for all $t
> t_0$ and $x(+\infty)=-\infty$. Moreover, if there exists $t_0\ge 0$ such
that $x'(t_0)<0$ then $x'(t) <0$ for all $t> t_0$ and $x(+\infty)=-\infty$.
\end{lemma}


\begin{proof}  As a consequence of \eqref{eq:gcomb}, if
$x'(t_0)=0$ and $\theta<x(t_0)<1$, then $x''(t_0)=  - g(t_0,
x(t_0))<0$, hence $x'(t)< 0$ for $t$ in a right neighborhood of
$t_0$. Similarly, if $x'(t_0)<0$, then by \eqref{eq:limcomb} we
have $x''(t_0)= \beta(c,t_0,x(t_0))x'(t_0) - g(t_0, x(t_0))<0$,
hence again $x'(t)< 0$ for $t$ in a right neighborhood of $t_0$,
where $x''(t)<0$. This implies that $x'(t)<0$ for all
$t> t_0$ and $x(+\infty)=-\infty$.
\end{proof}


In the next section, in order to apply a shooting method, we shall
need to introduce the following Cauchy problem on the positive
half-line
\begin{equation}
\begin{gathered}
x''-\beta(c,t,x)x'+g(t,x)=0, \quad t \ge 0 \\
x(0)=\theta, \quad x'(0)=a
\end{gathered}
\label{eq:Pca}
\end{equation}
where $a$ denotes a positive real number.

We can sum up the results of the present section in the following
statement.


\begin{corollary} \label{coro3.4}
Let $x$ be a solution of problem \eqref{eq:Pca}, satisfying conditions
\eqref{eq:limcomb} and \eqref{eq:gcomb}. Then only one of the following
four situations may occur:
\begin{itemize}

\item[(a)] There exists $t_0>0$ such that $x'(t_0)=0$ and
$\theta<x(t_0)<1$, implying $x(+\infty)=-\infty$.

\item[(b)] There exists $t_0>0$ such that $x(t_0)=1$ and
$x'(t_0)=0$; in this case also the function
\[
y(t)= \begin{cases} x(t) & \mbox{for }t \le t_0 \\
1& \mbox{for }t \ge t_0
\end{cases}
\]
is a solution of (\ref{eq:Pca}).

\item[(c)] There exists $t_0>0$ such that $x(t_0)=1$ and
$x'(t_0)>0$, implying $x(+\infty)=+\infty$ and $x'(t)>0$ for all
$t \ge 0$.


\item[(d)]  $x'(t)>0$ and $x(t) < 1$ for all positive
$t$, implying $x'(+\infty)=0$; therefore also $x''(+\infty)=0$ and
$x(+\infty)=1$.
\end{itemize}
\end{corollary}


\begin{proof}  In the case (d), by Lemma \ref{lm3.1} we have
$x'(+\infty)=0$. So, by \eqref{eq:gcomb} we have
${\limsup_{t\to +\infty}} \, x''(t)\le -{\lim_{t \to +\infty}} \tilde
g(x(t))= - {\lim_{\xi \to x(+\infty)}} \tilde g(\xi)$. Since
$\tilde g(x)>0$ in $]\theta,1[$, we get $x(+\infty)=1$ and
$x''(+\infty)=0$.
\end{proof}

\section{A shooting method approach for positive times}

Given $c \in J$, for each $a>0$ let us consider the
boundary value problem \eqref{eq:Pca} on the positive half-line and
define the following subsets of $]0, +\infty[$.
\begin{gather*}
A_c=\{a>0: \mbox{ each solution }
 x_a \mbox{ of  \eqref{eq:Pca}  satisfies } x_a(+\infty)=-\infty \} \\
B_c=\{a>0: \mbox{ each solution } x_a \mbox{ of \eqref{eq:Pca} satisfies }
x_a(+\infty)=+\infty \}.
\end{gather*}
By means of a shooting technique and taking Corollary \ref{coro3.4} into
account we shall provide now sufficient conditions implying  that
the sets $A_c$ and $B_c$ are non-empty for some $c \in J$ (see
Theorems \ref{thm4.1} and \ref{thm4.2}), finding estimates, dependent on the
parameter $c$, for $\sup A_c$ and $\inf B_c$.


\begin{theorem} \label{thm4.1}
Consider equation \eqref{eq:eq} subject to conditions \eqref{eq:limcomb} and
\eqref{eq:gcomb}. Let  $c \in J$ be given such that
\begin{equation}
M_c < \frac{\sqrt{2\int_{\theta}^1 g_1(s)\, ds}}{1-\theta}.
\label{eq:McperA}
\end{equation}
Then $A_c$ is non-empty and
$A_c \supseteq \Big]0, -M_c(1-\theta)+\sqrt{2\int_{\theta}^1
g_1(s)\, ds } \ \Big]$.
\end{theorem}

\begin{proof}
Let $c \in J$ be fixed. Given $a>0$, assume that $a\notin A_c$. Hence
there exists at least a solution $y_a$ of
problem \eqref{eq:Pca} such that $y_a(+\infty) \ne -\infty$.
Therefore, according to Corollary \ref{coro3.4}, we have $y_a(+\infty)=1$ or
$y_a(+\infty)=+\infty$. In order to simplify notation, we shall
omit, during this proof, the dependence of $y$ on $a$.

First assume $y(+\infty)=1$. Applying Lemma \ref{lm3.1},
we get  $y'(+\infty)=y''(+\infty)=0$. Integrating the equation
\eqref{eq:eq} in $[0, +\infty[$ we then obtain
\begin{equation}
a+ \int_0^{+\infty}\beta(c, s, y(s)) y'(s)\, ds - \int_0^{+\infty}g(s, y(s))\, ds =0.
\label{eq:A1}
\end{equation}
Since $y'\in L^1(0, +\infty)$ and $\beta(c,\cdot,y(\cdot)) \in
L^\infty(0, +\infty)$, we have
$\beta(c,\cdot,y(\cdot))y'(\cdot)\in L^1(0, +\infty)$, so also
$g(\cdot,y(\cdot))\in L^1(0, +\infty)$; hence the integrals in
(\ref{eq:A1}) are finite.

Now let us multiply \eqref{eq:eq} by $y$ and integrate by parts on
$[0, + \infty[$. Since $y'(+\infty)=0$, we obtain
\begin{equation}
\theta a+ \int_0^{+\infty}\!\!\!\!\! {y'}^2(s)\, ds +
\int_0^{+\infty}\!\!\!\!\! \beta(c, s, y(s))y(s)y'(s)\, ds -
\int_0^{+\infty}\!\!\!\!\! g(s, y(s))y(s)\, ds =0. \label{eq:A2}
\end{equation}
Since $y(t)<1$, we have $\beta(c,s,y(s))y(s)y'(s)\le
\beta(c,s,y(s))y'(s) $ and $g(s,y(s))y(s)\le g(s,y(s))$ for every
$s \ge 0$, then we get $\beta(c,\cdot,y(\cdot))y(\cdot)y'(\cdot)$,
$g(\cdot,y(\cdot))y(\cdot)\in L^1(0, +\infty)$. Hence, all the
integrals appearing in (\ref{eq:A2}) are finite.

 Finally, multiply \eqref{eq:eq} by $y'$ and integrate on
$[0, +\infty[$; we find
\begin{equation}
\frac12 a^2+ \int_0^{+\infty}\beta(c, s, y(s)){y'}^2(s)\, ds -
\int_0^{+\infty}g(s, y(s))y'(s)\, ds =0. \label{eq:A3}
\end{equation}
Note that, according to Corollary \ref{coro3.4} we have $y'(t)> 0$ for all
$t \ge 0$. Consequently, we have $ g(t,y(t))y'(t) \le
g_2(y(t))y'(t)$ for all $ t\ge 0 $, hence
\[
\int_0^{+\infty}g(s, y(s))y'(s)\, ds \le \int_{\theta}^1 g_2(\xi) \, d\xi < +\infty
\]
and this ensures that also the integrals appearing in
(\ref{eq:A3}) are finite.
Subtracting (\ref{eq:A1}) from (\ref{eq:A2}) we obtain
\begin{align*}
(\theta - 1)a+ \int_0^{+\infty}{y'}^2(s)\, ds -
\int_0^{+\infty}\beta(c, s, y(s))\bigl[1-y(s)\bigr]y'(s)\, ds &\\
-\int_0^{+\infty}g(s, y(s))\bigl[ y(s)-1\bigr]\, ds &=0.
\end{align*}
According to \eqref{eq:limcomb}, this implies
$$
\int_0^{+\infty}{y'}^2(s)\, ds <
a(1-\theta)+M_c\int_0^{+\infty}\bigl[1-y(s) \bigr]y'(s)\, ds=
 a(1- \theta)+\frac{M_c}{2}(1-\theta)^ 2.
 $$
Therefore, by (\ref{eq:A3}),  since $y'(t)>0$ for all positive
$t$, we get
\begin{align*}
 \int_{\theta}^1 g_1(\xi)\, d\xi
&\le \int_0^{+\infty} g(s, y(s))\, y'(s)\, ds
= \frac12 a^2 +\int_0^{+\infty} \beta (c, s, y(s))\, {y'}^2(s)\, ds \\
&\le \frac12 a^2 + M_c\int_0^{+\infty}{y'}^2(s)\, ds < \frac12 a^2
+M_c(1 - \theta)a + \frac{M_c^2}{2}(1- \theta)^2.
\end{align*}
Finally, given $a>0$ and $c \in J$ with $a \not \in A_c$, assuming
$y(+\infty)=1$ for at least one solution of problem \eqref{eq:Pca},
we obtain the following relation between the parameters of the
dynamic
\begin{equation}
a^2 + 2M_c(1- \theta)a + M_c^2(1- \theta)^ 2 -2\int_{\theta}^ 1
g_1(s)\, ds > 0; \label{eq:A5}
\end{equation}
that is
$$
a > -M_c(1-\theta)+\sqrt{2\int_{\theta}^1 g_1(s)\, ds }.
$$
Hence, if (\ref{eq:McperA}) holds, the set $A_c$ is nonempty and
the assertion follows.


Consider now the remaining case when $y(+\infty)=+\infty$ for a
solution $y$ of \eqref{eq:Pca}. Obviously there exists a positive
value $t_1$ such that $y(t_1)=1$ and $y(t)<1$ for $0\le t <
t_1$.
 According to Corollary \ref{coro3.4}  we
have $y'(t) > 0$ for all $t\in [0, t_1[ $.
Integrating the equation \eqref{eq:eq} in $[0, t_1]$ we  obtain
\begin{equation}
a - y'(t_1)+ \int_0^{t_1}\beta(c, s, y(s)) y'(s)\, ds
- \int_0^{t_1}g(s, y(s))\, ds =0.
\label{eq:A6}
\end{equation}
Again multiplying \eqref{eq:eq} by $y$
and integrating by parts on $[0, t_1]$ we have
\begin{equation}
\theta a - y'(t_1)+ \int_0^{t_1}\!\!\!\!\!\! {y'}^2(s)\, ds +
\int_0^{t_1}\!\!\!\!\!\! \beta(c, s, y(s))y(s)y'(s)\, ds
-\int_0^{t_1}\!\!\!\!\!\! g(s, y(s))y(s)\, ds =0. \label{eq:A7}
\end{equation}
Consequently, subtracting (\ref{eq:A6}) from (\ref{eq:A7}) we
obtain
$$
\int_0^{t_1}{y'}^2(s)\, ds < a(1-\theta) + M_c\int_0^{t_1}
\bigl[1-y(s)\bigr]y'(s)\, ds \le a(1-\theta) + \frac{M_c}{2}(1-\theta)^2.
$$
Multiplying now \eqref{eq:eq} by $y'$ and integrating on
$[0, t_1]$ we have
$$
\frac12 a^2 - \frac12\bigl[y'(t_1)\bigr]^2+ \int_0^{t_1}\beta(c, s,
y(s)){y'}^2(s)\, ds - \int_0^{t_1}g(s, y(s))y'(s)\, ds =0.
$$
Reasoning as before we again arrive at relation (\ref{eq:A5}).
Hence the conclusion holds also in this case.
\end{proof}


\begin{theorem} \label{thm4.2}
Consider equation
\eqref{eq:eq} subject to conditions \eqref{eq:limcomb} and
\eqref{eq:gcomb}. For every $c\in J$, the set $B_c$ is nonempty.
In particular we have
\[
B_c \supseteq \Bigg[\sqrt{2\int_{\theta}^1 g_2(s)\, ds },
+\infty\Bigg[.
\]
\end{theorem}


\begin{proof}  Fix $c \in J$.  Given $a >0$, assume that $a \notin
B_c$. Then, according to Corollary \ref{coro3.4}, a solution $y_a$ of
problem \eqref{eq:Pca} exists such that $y_a(+\infty)=1$ or
$y_a(+\infty)=-\infty$. In both cases there exists $t_0 \in ]0,
+\infty]$ such that $y'_a(t_0)=0$; in fact, when $y_a(+\infty)=1$
we have $y'_a(+\infty)=0$ by Corollary \ref{coro3.4}. In addition, Lemma \ref{lm3.3}
implies that $y'_a(t)>0$ for all $0 \le t <t_0$.
To simplify notation, as in the proof of Theorem \ref{thm4.1}, we
shall denote $y_a$ and $y'_{a}$ respectively by $y$ and $y'$.

Let us multiply \eqref{eq:eq} by $y'$ and integrate on $[0, t_0]$;
we obtain
\begin{equation}
\frac12 a^2 + \int_0^{t_0} \beta(c, s, y(s))\, {y'}^2(s)\, ds -
\int_0^{t_0}g(s, y(s))y'(s)\, ds=0. \label{eq:B1}
\end{equation}
Since
\[
\int_0^{t_0}g(s, y(s))y'(s)\, ds  \le \int_0^{t_0}g_2(y(s))y'(s)\,
ds \le \int_\theta^1 g_2(\xi)\, d\xi < +\infty ,
\]
even if  $t_0=+\infty$ both the integrals in (\ref{eq:B1}) are
finite. Moreover, since
\[
\int_0^ {t_0} \beta(c, s, y(s))\, {y'}^2(s)\, ds >0,
\]
we have
\[
\frac12 a^2 < \int_0^{t_0}g(s, y(s))y'(s)\, ds   \le \int_\theta^1
g_2(\xi)\, d\xi
\]
implying
$a < \sqrt{2\int_{\theta}^1 g_2(s)\, ds }$.
\end{proof}

\section{Compactness of solution sets}

In this section we shall  consider suitable families of solutions
of \eqref{eq:Pca} obtained when $a$ and $c$ vary in bounded sets.
Our aim is to prove their relative compactness in the Fr\'echet
space $C^1([0,+\infty[)$. To this purpose, given a real interval
$I\subset \mathbb{R}$, recall that a subset $A$ of $C^1(I)$ is
bounded if and only if there exists a positive continuous function
$\Phi:I \to \mathbb{R} $ such that
\[
\vert x(t)\vert + \vert x'(t) \vert \le \Phi(t) \quad
\mbox{for all }x \in A \mbox{ and }t \in I.
\]
Moreover, according to Ascoli's theorem, the relative compactness
of $A$ in $C^1(I)$ is guaranteed by the  boundedness combined with
the equicontinuity, at each $t \in I$, of the derivatives of all
$x \in A$.

Hence, the relative compactness of a family $A$ of functions in
$C^1(I)$ is ensured by the existence of a function $\Phi\in
C^0(I)$ such that
\[
\vert x(t)\vert + \vert x'(t) \vert + \vert x''(t) \vert \le
\Phi(t) \quad \mbox{for all }x \in A \mbox{ and }t \in I.
\]
Indeed, since
$x(0)=\theta$ for every solution of \eqref{eq:Pca}, in this case it suffices to
prove that
\[
\vert x'(t) \vert + \vert x''(t) \vert \le \phi(t) \quad \mbox{for
all }x \in A \mbox{ and }t \in I
\]
for some function $\phi \in C^0([0,+\infty[)$, since
we have $|x(t)| \le \theta + \int_0^t \phi(\tau) d\tau$ for every
$t>0$.

\begin{proposition} \label{prop5.1}
Let $C\subset J$ and $I\subset ]0,+\infty[$ be two bounded intervals,
with $\inf C > \inf J$. Then, each family  $X=(x_{c,a})_{c,a}$  of
solutions of \eqref{eq:Pca} with $c \in C$ and $a \in I$ is relatively compact in
$C^1([0,+\infty[)$.
\end{proposition}

\begin{proof} Let $\bar c = \sup C$, $\bar a = \sup I$,
and $\bar{g_2}=  {\sup_{x \in [0,1]}} g_2(x)$.
Moreover, let
\begin{gather*}
X_+:=\{ x_{c,a} \in X : x_{c,a}(+\infty)=+\infty\},\\
X_-:=\{ x_{c,a} \in X : x_{c,a}(+\infty)=-\infty\},
X_1:=\{x_{c,a} \in X : x_{c,a}(+\infty)=1\}.
\end{gather*}
By virtue of Corollary \ref{coro3.4}, we have $X= X_+\cup X_- \cup X_1$, so it
suffices to prove that these three subfamilies are relatively
compact.

Note that for every $x_{c,a} \in X_+$ we have \ $ x_{c,a}'(t) \ge
0 $ for every $t\ge 0$ (see Lemma \ref{lm3.3}), hence from
\eqref{eq:gcomb} we deduce
\[
x_{c,a}''(t) \le \beta(c, t, x_{c,a}(t))x_{c,a}'(t) \le M_c
x_{c,a}'(t) \le  M_{\bar c} x_{c,a}'(t), \quad \mbox{for
every } t \ge 0 .
\]
Therefore, $ 0<x_{c,a}'(t) \le \bar a \mbox{e}^{ M_{\bar c}
t}$. Then,  for every $t \ge 0$ we have $  - \bar{g_2} \le
x_{c,a}''(t) \le M_{\bar c} \bar a \mbox{e}^{M_{\bar c}t} $.
Hence, $X_+$ is relatively compact.

Observe now that for every $x_{c,a}\in X_1$ we have
$x_{c,a}'(+\infty)=0$, therefore $x_{c,a}'$ has a maximum on
$[0,+\infty[$ attained at a point $t_0$ which obviously depends on
both $c$ and $a$. We have two possibilities, either $t_0=0$ and
$x_{c,a}'(t)\le x_{c,a}'(0)=a$ for all $t \ge 0$, or $t_0>0$ and $
x_{c,a}^{''}(t_0)=0$. In the latter case, since $\tilde c:=\inf C
>\inf J$, and consequently  $m_{\tilde c}>0$,
we have
$$
x_{c,a}'(t_0)=\frac{g(t_0, x_{c,a}(t_0))}{\beta(c, t_0,
x_{c,a}(t_0))} \le \frac{\bar g_2}{m_{\tilde c}}.
$$
So, putting $H:= \frac{\bar g_2}{m_{\tilde c}}$, we deduce
$ 0< x_{c,a}'(t) \le\max\{\bar a, H\}\le \bar a +H $, implying
$$
-\bar{g_2} \le
x''_{c,a}(t) \le M_{\bar c}(\bar a+H) \quad \mbox{ for every } t
\ge 0.
$$
Hence, also $X_1$ is relatively compact.

Finally, let us consider the family $X_-$. Similarly to what was done
above, it is easy to show that $ x_{c,a}'(t) \le  \bar a +H $, for
$t \ge 0$, for every $x_{c,a}\in X_-$.  Moreover, in the
half-lines $[t_0, +\infty[$ where $x_{c,a}'$ is  negative and
$x_{c,a}'(t_0)=0$, we have $ x_{c,a}''(t) \ge M_{\bar c}
x_{c,a}'(t) - \bar g_2 $, implying $ x_{c,a}'(t)\ge  \frac{\bar
g_2}{M_{\bar c}}\bigl(1-\mbox{e}^{M_{\bar c}t}\bigr)$.
 We have then obtained
\[
\frac{\bar g_2}{M_{\bar c}}\bigl(1-\mbox{e}^{M_{\bar c}t}\bigr)
\le x_{c,a}'(t) \le \bar a +H \quad \mbox{for all } t \ge 0.
\]
Consequently we have
\[
-\bar{g_2} \mbox{e}^{M_{\bar c}t}\le x_{c,a}''(t) \le M_{\bar
c}(\bar a +H) \quad \mbox{for } t\ge 0.
\]
Then, also $X_-$ is relatively compact in $C^1([0,+\infty[)$.
\end{proof}

\section{Proofs of the main results}

We are now ready to provide the proofs of the results stated in
Introduction.


\begin{proof}[Proof of Theorem \ref{thm1.1}]
First we prove, under conditions
(\ref{eq:thm1.1-1}) and (\ref{eq:thm1.1-2}), the existence of at
least a value $c^*$ for which problem \eqref{eq:P} has a solution
$x$  satisfying $x(0)=\theta$. To this aim, according to
Theorem \ref{thm2.4}, it suffices to prove the existence of at least a value $c^*$
for which the Cauchy problem on $[0,+\infty[$
\begin{equation} \label{Pclc}   %% (P_{c, \lambda(c)})
\begin{gathered}
x''-\beta(c,t,x)x'+g(t,x)=0, \quad t \ge 0 \\
x(0)=\theta, \quad x'(0)=\lambda(c)
\end{gathered}
\end{equation}
admits a solution $x$ satisfying $x(+\infty)=1$.
Set
$$
\hat A :=\{ c \in J\,:\, \lambda(c) \in A_c\} , \quad
\hat B :=\{ c \in J\,:\, \lambda(c) \in B_c\}.
$$
Note that assumption (\ref{eq:thm1.1-1}) easily implies that
condition (\ref{eq:McperA}) is satisfied, for all $c$ sufficiently
close to $\inf J$. Moreover, being $\lambda(c)\le \theta M_c$, we
have  $c \in \hat A$ for every $c$ sufficiently close to
$\inf J$. Similarly, by Theorem \ref{thm4.2}, being $\lambda(c)\ge m_c$ condition
(\ref{eq:thm1.1-2}) implies that $c\in \hat B$ for every $c$
sufficiently close to $\sup J$. Hence, both sets $\hat A$,
$\hat B$ are nonempty. Let us now show that they are open.

Assume, by contradiction, the existence of a point
$c_0 \in \hat A$ and a sequence $(c_n)_n$  converging to $c_0$ such that
$c_n\notin \hat A$ for every $n \in \mathbb{N}$. It is then possible to
find a corresponding sequence $(x_n)_n$ of solutions of problems
\begin{equation} \label{Pcnlcn} %(P_{c_n, \lambda(c_n)})
\begin{gathered}
x''-\beta(c_n,t,x)x'+g(t,x)=0, \quad t \ge 0 \\
x(0)=\theta, \quad x'(0)=\lambda(c_n)
\end{gathered}
\end{equation}
satisfying $ x_n(+\infty) \ne -\infty $.

Of course, the set $C=\{c_n\,: \, n \in \mathbb{N} \}$ is bounded with
$\inf C > \inf J$. Moreover, owing to the continuity and
monotonicity of the function $\lambda(c)$  also
$I=\{\lambda(c_n)\, : \, n \in \mathbb{N} \}$ is bounded. Hence, by
applying Proposition \ref{prop5.1} we deduce that $(x_n)_n$ is a
relatively compact subset of the Fr\'echet space
$C^1([0,+\infty[)$. It is then possible to extract a subsequence,
again denoted $(x_n)_n$, which converges in $C^1([0,+\infty[)$ to
a function $x$. Therefore, $x$ is a solution of the Cauchy problem
(\ref{Pclc}) with $c=c_0$ and since $c_0 \in \hat A$ we have
$x(+\infty)=-\infty$. On the other hand, by Corollary
\ref{coro3.4} we have that $x_n'(t) \ge 0$ for all $n \in \mathbb{N}$ and
$t\ge 0$. Therefore $x'(t) \ge 0$ for all $t>0$, a contradiction.

Similarly, assume by contradiction the existence of a point $c_0
\in \hat B$ and a sequence $(c_n)_n$  converging to $c_0$ such
that $c_n \notin \hat B$ for every $n \in \mathbb{N}$. It is then
possible to find a corresponding sequence $(x_n)_n$ of solutions
of problems (\ref{Pclc}) with $c=c_n$ satisfying $ x_n(+\infty)
\ne +\infty $.

By the relative compactness of the set $(x_n)_n$, we can extract a
subsequence, again denoted $(x_n)_n$, which converges in
$C^1([0,+\infty[)$ to a function $x$. Therefore, $x$ is a solution
of the Cauchy problem (\ref{Pclc}) with $c=c_0$ and since $c_0 \in
\hat B$ we have $x(+\infty)=+\infty$. Hence, we have $x(t)>1$ for
all $t$ sufficiently large. On the other hand, by Lemma
\ref{lm3.3} and Corollary \ref{coro3.4} we have that $x_n(t) \le 1
$ for all $t \ge 0$, and this is a contradiction.

Therefore, since $\hat A $ and $\hat B$ are disjoint, nonempty and
open, there exists a value $ c^* \not \in \hat A \cup \hat B$. We
will now prove that problem \eqref{eq:P} is solvable for $c=c^*$.
Let us assume, by contradiction, that for every solution $x$ of
problem (\ref{Pclc}) with $c=c^*$ we have $x(+\infty)\ne 1$. Set
\begin{gather*}
X^+:= \{ x \mbox{ is a solution of \eqref{Pclc} with $c=c^*$
and $x(+\infty)=+\infty$}\} \\
X^-:= \{ x \mbox{ is solution of \eqref{Pclc}  with $c=c^*$
and $x(+\infty)=-\infty$}\}.
\end{gather*}
Since $c^*\not \in \hat A \cup \hat B$, $X^+\ne \emptyset$ and
$X^- \ne \emptyset$.
Let
\begin{gather*}
\tau^+:=\sup \{ t : x(t)=1 \mbox{ for some } x \in X^+\}\in ]0,+\infty], \\
\tau^-:=\sup \{ t : x(t)=0 \mbox{ for some } x \in X^-\}\in
]0,+\infty].
\end{gather*}
Note that $\tau^+ =\tau^- = +\infty$. In
fact, let us consider the associate differential system
\begin{equation}
\begin{aligned}
 y_1'(t)&=  y_2(t)\\
 y_2'(t)&=  \beta(c^*,t,y_1(t))y_2(t) -g(t,y_1(t))\\
 y_1(0)&=  \theta, \quad
 y_2(0)= \lambda(c^*)
\end{aligned}
\label{eq:2ndorder}
\end{equation}
and consider, for every $t>0$ the sections
$$
S_t:=\{ (\bar{y}_1,\bar{y}_2) \in \mathbb{R}^2 : y_1(t)=\bar{y}_1, y_2(t)=\bar{y}_2
\mbox{ for some solution $(y_1,y_2)$ of \eqref{eq:2ndorder}} \}.
$$
By classical results, each section $S_t$ is a continuum, that is a
nonempty, compact, connected set. Hence, if $\tau^+ \in \mathbb{R}$,
for every $ t>\tau^+ $ the section $S_t$ is not a continuum. In
fact, each point $(\alpha, y_2)$ coming from a solution $x \in
X^+$ necessarily has $\alpha >1$. Moreover all points $(\gamma,
\tilde y_2)$ deriving from solutions $x \in X^-$ must have
$\gamma<1$, since otherwise by virtue of Corollary \ref{coro3.4} (b) problem
\eqref{Pclc} with $c=c^*$ also admits a solution satisfying
$x(+\infty)=1$, while we have just assumed $x(+\infty)\ne 1$ for
every solution $x$ of problem  \eqref{Pclc} with $c=c^*$.
Hence $S_t$ does not contain points $(1,y)$ for any $y \in \mathbb{R}$ and
this implies $S_t$ is not a continuum.

Therefore, $\tau^+=+\infty$ and by means of an analogous argument
we can show that also $\tau^-=+\infty$. Let us take now a
diverging sequence $(t_n)_n$ and a sequence of solutions $(x_n)_n$
of problem \eqref{Pclc} with $c=c^*$ , such that $x_n(t_n)=1$ for
every $n\in \mathbb{N}$. By the relative compactness of the sets
of solutions, we deduce the existence of a subsequence, again
denoted $(x_n)_n$, converging to a function $x$ in
$C^1([0,+\infty[)$. Hence, also $x$ is a solution of \eqref{Pclc}
with $c=c^*$, and it satisfies $x(+\infty)=1$.


Thus, we have proved the existence of a solution $x$ of problem
\eqref{eq:P}, for $c=c^*$, satisfying $x(0)=\theta$.


Now, for every $\tau \in \mathbb{R}$, let us consider the functions
$$
\tilde\beta(c,t,x):=\beta(c,t+\tau,x)\,, \quad
\tilde g(t,x):= g(t+\tau,x).
$$
As it is easy to verify, $\tilde \beta$ and $\tilde g$ satisfy all
the assumptions on $\beta$ and $g$, i.e. conditions
\eqref{eq:limcomb}, \eqref{eq:betamon}, (\ref{eq:thm1.1-1}) and
(\ref{eq:thm1.1-2}). Hence, a value $c_{\tau} \in J$ exists such
that
\begin{gather*}
x''-\tilde \beta(c_{\tau},t,x)x' + \tilde g(t,x)=0  \\
 x(-\infty)=0,\ x(+\infty)=1
\end{gather*}
admits a solution $\tilde x$ with $\tilde x(0)=\theta$. Therefore,
the shifted function $x_\tau(t):=\tilde x(t-\tau)$ is a solution
of problem \eqref{eq:P}, with the same $c_{\tau}$, satisfying
$x_\tau(\tau)=\theta$.

Under the additional monotonicity conditions (\ref{eq:MON}), now
it remains to prove the uniqueness of $c_{\tau}$ for any given
real $\tau$. To this aim we reason by  contradiction and we assume
that for  a fixed $\tau \in \mathbb{R}$ there exist two parameters
$c_1<c_2$ in $J$ as well as two corresponding solutions $x_1(t)$
and $x_2(t)$ of problem \eqref{eq:P} satisfying
$x_1(\tau)=x_2(\tau)=\theta$. Since the functions $\tilde x_i(t)$
with $i=1,2$, defined as before, are solutions of the same
boundary value problem with $\tilde \beta$ and $\tilde g$
satisfying all the required conditions and $\tilde x_i(0)=\theta$,
we can apply Lemma \ref{lm2.1} and Corollary \ref{coro3.4} in
order to obtain $x_i' (t)>0$ for all $t \in \mathbb{R}$ such that
$x_i(t)<1$ and $i=1,2$. Hence the inverse functions $t_i:]0,1[
\rightarrow \mathbb{R}$ $i=1,2$ exist and satisfy
$$
\lim_{x \to 0^+} t_i(x)=-\infty, \quad \lim_{x \to
1^-}t_i(x)=T_i\in ]0, +\infty]
$$
with $x_i(T_i)=1$. We put now, for $i=1,2$ and $x \in [0,1]$,
$$
\beta_i(x):=\beta(c_i, t_i(x), x), \quad g_i(x):=g(t_i(x), x)
\quad \mbox{and} \quad z_i(x):=x_i' (t_i(x)).
$$
It is easy to see that $z_i$ satisfies
\begin{equation}
\dot z = \beta_i(x)-\frac{g_i(x)}{z}, \quad x \in ]0,1[ \quad
\big( \,\, \dot{} = \frac{d}{dx} \, \big) . \label{eq:Z}
\end{equation}
Moreover, according to Lemma \ref{lm3.1}, we have $x_i'(\pm \infty)=0$,
implying $z_i(0)=z_i(1)=0$, $i=1,2$. As a consequence of Theorem
\ref{thm2.4} and condition (\ref{eq:MON}(i)) one has
$z_2(\theta)-z_1(\theta)=\lambda(c_2)-\lambda(c_1)\ge 0$ and
$$
\dot z_2(\theta)-\dot z_1(\theta)=\beta(c_2, \tau, \theta)-\beta(c_1,
\tau, \theta)-\frac{g(\tau, \theta)}{\lambda(c_2)}+ \frac{g(\tau,
\theta)}{\lambda(c_1)}>0.
$$
Moreover, since $z_2(x)>z_1(x)$ we have
$$
t_2(x)-\tau =\int_{\theta}^ {x} t_2' (\xi)\, d\xi=\int_{\theta}^
{x} \frac{d\xi}{z_2(\xi)}<\int_{\theta}^ {x}
\frac{d\xi}{z_1(\xi)}=\int_{\theta}^ {x}t_1' (\xi)\,
d\xi=t_1(x)-\tau,
$$
hence, according to \eqref{eq:limcomb}, (\ref{eq:MON}(ii)) and
(\ref{eq:MON}(iii)) we get
$$
\dot z_2(x)=\beta(c_2, t_2(x), x)-\frac{g(t_2(x), x)}{z_2(x)}\ge
\beta(c_1, t_1(x), x)-\frac{g(t_1(x), x)}{z_1(x)}=\dot z_1(x).
$$
Consequently, $z_2(x)>z_1(x)$ for all $x \in ]\theta, 1]$ in
contradiction with $z_1(1)=z_2(1)=0$.
 \end{proof}



\begin{proof}[Proof of Theorem \ref{thm1.2}]
Take $\tau_1<\tau_2$ and assume there are two solutions $x_1$ and $x_2$
of problem \eqref{eq:P} corresponding to the same parameter $c$ and such that
$x_1(\tau_1)=x_2(\tau_2)=\theta$. Reasoning as in the proof of
Theorem \ref{thm1.1}, we introduce $t_i(x)$, with $t_i(\theta)=\tau_i$,
$\beta_i(x)$ with $c_1=c_2=c$, $g_i(x)$ and $z_i(x)$ for $i=1,2$.
We recall that $z_i(x)$ satisfies (\ref{eq:Z}) on $]0,1[$ as well
as $z_i(0)=z_i(1)=0$. Moreover, by conditions \eqref{eq:gcomb} we
also have
$$
\dot z_i(x)=\beta_i(x) \quad \mbox{for all } x \in ]0, \theta[.
$$

First we show that $z_1(\theta)<z_2(\theta)$ leads to a
contradiction. Indeed, since $\tau_1<\tau_2$ and according to the
strict monotonicity of $\beta$ with respect to $t$, we obtain $
\dot z_1(\theta^-)=\beta(c, \tau_1, \theta)>\beta(c, \tau_2,
\theta)=\dot z_2(\theta^-)$. Moreover, assuming $ \dot z_1(x)>
\dot z_2(x)$ for all $x \in ]\bar x, \theta[$ with $0<\bar x <
\theta$, we get $z_1(x)< z_2(x)$ in the same interval and
$$
\tau_1 - t_1(x)= \int_{x}^ {\theta} t_1' (\xi) \, d\xi=\int_{x}^
{\theta} \frac{d\xi}{z_1(\xi)}>\int_{x}^ {\theta}
\frac{d\xi}{z_2(\xi)}=\int_{x}^ {\theta}t_2' (\xi)\,
d\xi=\tau_2-t_2(x)
$$
implying $t_2(x)>t_1(x)$ for all $x \in [\bar x, \theta]$.
Therefore \ $ \dot z_1(\bar x)=\beta(c, t_1(\bar x), \bar
x)>\beta(c, t_2(\bar x), \bar x)=\dot z_2(\bar x).$ Hence $\dot
z_1(x)>\dot z_2(x)$ for all $x \in ]0, \theta[$ but this is in
contradiction with $z_1(0)=z_2(0)=0$. We have then proven that
$z_1(\theta)\ge z_2(\theta)$.

Consequently we have $\dot z_1(\theta^+)-\dot z_2(\theta^+)>0$,
and assuming $\dot z_1(x)-\dot z_2(x)>0$ for all $x \in [\theta,
\bar x[$, with $\theta < \bar x <1$, we can reason as before and
obtain $t_1(x)<t_2(x)$ in $[\theta, \bar x[$. Proceeding as in the
proof of Theorem \ref{thm1.1} this leads to a contradiction with
$z_1(1)=z_2(1)=0$ and we have proven that $\tau \rightarrow
c_{\tau}$ is an injective function. Now it remains to show that it
is also continuous.

First notice that $c_{\tau} >\inf J$ for all $\tau \in
\mathbb{R}$. In fact, consider again the functions $\tilde \beta$,
$\tilde g$ and $\tilde x$ introduced in the proof of Theorem
\ref{thm1.1}. Since also $\tilde \beta$ and $\tilde g$
respectively satisfy conditions \eqref{eq:limcomb},
\eqref{eq:gcomb} and \eqref{eq:betamon}, then Theorem \ref{thm2.4}
implies $\theta m_{c_{\tau}}\le \tilde x' (0) =x'
(\tau)=\lambda(c_{\tau})\le \theta M_{c_{\tau}}$. Moreover,
according to Theorems \ref{thm4.1} and  \ref{thm4.2}, we get
$$
M_{c_{\tau}} > \sqrt{2\int_{\theta}^ 1 g_1(s)\, ds}, \quad
m_{c_{\tau}}<\frac{\sqrt{2\int_{\theta}^ 1 g_2(s)\, ds}}{\theta}.
$$
Consequently, conditions (\ref{eq:thm1.1-1}), (\ref{eq:thm1.1-2})
and the monotonicity of both $m_c$ and $M_c$ imply that the image
set $C=\{c_{\tau}: \tau \in \mathbb{R} \}$ is  bounded with $\inf
C>\inf J$.

Given $\tau_0 \in \mathbb{R}$, suppose now the existence of
$\tau_n \to \tau_0$ as $n \to +\infty$ and such that
$c_{\tau_n}\not \rightarrow c_{\tau_0}$. Since $c_{\tau_n}$ is
bounded, with no loss of generality we can assume that
$c_{\tau_n}\to \bar c\in J$ with $\bar c \neq c_{\tau_0}$. Let
$(x_n)_n$ be a sequence of solutions of \eqref{eq:P} with
$c=c_{\tau_n}$ and $x_n(\tau_n)=\theta$. For all $n \in
\mathbb{N}$, define $\tilde x_n(t):=x_n(t+\tau_n)$. As it is easy
to see, each $\tilde x_n$ is a solution of the problem
\begin{gather*}
x''-\beta(c_{\tau_n},t+\tau_n,x)x'+g(t+\tau_n,x)=0 \\
x(-\infty)=0, \quad x(+\infty)=1, \quad x(0)=\theta.
\end{gather*}
Moreover, by Theorems \ref{thm2.4} and  \ref{thm4.2} which are valid also when
$\beta(c,t,x)$ and $g(t,x)$ are respectively replaced by $\beta(c,
t+\tau_n, x)$ and $g(t+\tau_n, x)$, we have
$$
0<\tilde x_n' (0)<\sqrt{2\int_{\theta}^ 1 g_2(s)\, ds}.
$$
Since, in addition, $c_{\tau_n} > \inf J$ and $c_{\tau_n}$ is
bounded, we can reason as in the proof of Proposition \ref{prop5.1} in order
to obtain that $(\tilde x_n)_n$ is relatively compact in the
Fr\'echet space $C^1([0, +\infty[)$.

It is then possible to extract a subsequence, again denoted
$(\tilde x_n)_n$, which converges to $\tilde x$. According to the
continuity of the function $\lambda$, as shown in Theorem \ref{thm2.4}, and
since $\tau_n \rightarrow \tau_0$ when $n \to +\infty$, then
$\tilde x(t)$ is a solution of
\begin{gather*}
x''-\beta(\bar c,t+\tau_0,x)x'+g(t+\tau_0,x)=0 \\
x(0)=\theta, \quad x'(0)=\lambda(\bar c).
\end{gather*}
Moreover $0\le \tilde x(t)\le 1$ and $\tilde x' (t)\ge 0$ for all
$t \in \mathbb{R}$. Therefore by Lemma \ref{lm3.1} and condition
\eqref{eq:gcomb}, we obtain $\tilde x(+\infty)=1$. In addition, by
the definition of $\lambda$, we also have $\tilde x(-\infty)=0$.
Hence the associated function $x(t):=\tilde x(t-\tau_0)$ is a
solution of problem \eqref{eq:P} with $c=\bar c$ and
$x(\tau_0)=\theta$ in contradiction with the uniqueness of
$c_{\tau_0}$.
 \end{proof}


\begin{proof}[Proof of Corollary \ref{coro1.3}]
By virtue of what observed in Introduction, we have only to prove
the uniqueness of the solution of \eqref{eq:P} for $c=c^*$, up to
a time-shift. Take in fact $x_1(t)$ and $x_2(t)$ satisfying
\eqref{eq:P}, with $c=c^*$, as well as
$x_1(\tau)=x_2(\tau)=\theta$. Then $x_i'(t)>0$ for all $t \in
\mathbb{R}$ such that $x_i(t)<1$ and $i=1,2$ (see Lemma \ref{lm2.1} and
Corollary \ref{coro3.4}). Therefore, reasoning as in the proof of
Theorem \ref{thm1.1} we can introduce two functions
$z_i(x)=x_i'(t_i(x))$, where $t_i(x)$ is the inverse function of
$x_i$ for $i=1,2$, and they are both solutions of the problem
\begin{gather*}
\dot z=\beta(c^*,x)-\frac{g(x)}{z}\\
z(0)=z(1)=0.
\end{gather*}
Moreover $z_1(\bar x)<z_2(\bar x)$ for some $\bar x \in ]0,1[$
implies $\dot z_1(\bar x )=\beta(c^*,\bar x)-\frac{g(\bar
x)}{z_1(\bar x)}<\beta(c^*,\bar x)-\frac{g(\bar x)} {z_2(\bar
x)}=\dot z_2(\bar x)$ and this yields to the contradictory
conclusion $0=z_2(1)-z_1(1)>0$. Therefore $z_1(x)=z_2(x)$ for all
$x \in [0,1]$. Hence
\begin{align*}
t_1(x)-\tau &=\int_{\theta}^{x} t_1' (\xi)\, d\xi
=\int_{\theta}^{x} \frac{d\xi}{x_1' (t_1(\xi))}
=\int_{\theta}^{x}\frac{d\xi}{z_1(\xi)} \\
&= \int_{\theta}^{x}\frac{d\xi}{z_2(\xi)}
=\int_{\theta}^{x}\frac{d\xi}{x_2'(t_2(\xi))} =\int_{\theta}^x
t_2' (\xi)\, d\xi=t_2(x)-\tau
\end{align*}
and $x_1(t)=x_2(t)$ for all $t \in \mathbb{R}$. \end{proof}



\begin{proof}[Proof of Theorem \ref{thm1.4}]
 First of all, observe
that if problem \eqref{eq:P} is solvable for some $c$, then $c \ge
-h(0)$. In fact, if $c +h(0) <0$, then $c+h(x) < 0 $ for every
positive $x$ sufficiently small. Therefore, there exists a value
$\bar t$ such that  $x''(t) <0$ for every $t < \bar t$, and this
is a contradiction being $x(-\infty)=0$.

Since the problem is autonomous, in order to show the existence of
a unique $\lambda(c)$, we do not need any monotonicity assumption
on $h$. In fact, being $\dot z(x)= c + h(x) - {\frac{g(x)}{z(x)}}$,
the slope $\lambda(c)$ can be
computed explicitly:  $\lambda(c) = c \theta + \int_0^\theta h(s)
ds$. Hence, by \eqref{eq:nonex} we have
$$
\lambda(c) \ge \int_0^\theta h(s) ds - \theta h(0) \ge \sqrt{2 \int_\theta^1
g_2(s) ds} \quad \mbox{for every } c \ge -h(0)$$ and the
assertion is an immediate consequence of Theorem \ref{thm4.2}.
\end{proof}


\begin{proof}[Proof of Corollary \ref{coro1.5}]
First observe that we used
the monotonicity assumption \eqref{eq:betamon} on $[0,\theta]$
only in the analysis for negative times, in order to prove Theorem
\ref{thm2.4}. But if the problem is autonomous, as we just noted in the
previous proof, the slope $\lambda(c)$ can be computed explicitly
and it trivially satisfies all the properties proved in Theorem
\ref{thm2.4}. So, we can avoid the monotonicity assumption on the function
$h(x)$.

Now, note that all the other assumptions of Theorem \ref{thm1.1} are
satisfied taking $J=]-m,+\infty[$ (see Introduction); hence a
$c^*>-m$ exists for which \eqref{eq:P} is solvable. Since
% NOTE(review): hardcoded theorem numbers below; prefer \ref with their labels — confirm
$M_{c^*}=c^* + M$, by Theorems 4.1 and 4.2 we deduce that
$$
-(c^*+ M)(1-\theta) + \sqrt{ 2 \int_\theta^1 g_1(s) ds} < \lambda(c^*) <
\sqrt{ 2 \int_\theta^1 g_2(s) ds}.
$$
But in this case we get $\lambda(c^*)= c^*\theta + \int_0^\theta
h(s) ds$, hence the assertion immediately follows.
\end{proof}

We conclude this paper with an application of our results to the
case when $\beta(c,t,x)=c+kx$, with $k>0$, and $g$ is a suitable
autonomous function. Such a situation occurs when studying the
existence of travelling wavefronts for equation \eqref{eq:rd},
with $\frac{\partial H}{\partial u}=ku$, i.e. with a linear
convective speed.

\bigskip
\noindent\textbf{Example 6.1} Let $h(x)=kx$, with $k>0$ constant, and
let
$$
g(t,x)=g(x)= \begin{cases}  0 & \mbox{ for } 0\le x \le \frac12 \\
-2x^2+3x-1 & \mbox{ for } \frac12 \le x \le 1.
\end{cases}
$$
Note that function $g$ satisfies \eqref{eq:gcomb} for
$\theta =1/2$. Moreover, $\int_{1/2}^1 g(s) ds = 1/24$.
Condition \eqref{eq:nonex} becomes $ k/8 \ge 1/(2\sqrt 3)$.
Hence, by applying Theorem \ref{thm1.4}, we deduce
that if $k \ge 4/\sqrt 3$ the problem
\begin{equation}
\begin{gathered}
x'' - (c+kx)x' + g(x) =0 \\
x(-\infty)=0 ,\quad x(+\infty)=1 ,\quad  0 \le x(t) \le 1
\end{gathered} \label{eq:example}
\end{equation}
has no solutions for any $c\in \mathbb{R}$. Instead, condition
\eqref{eq:lim} becomes $ k < 1/(2\sqrt 3)$ and in this case
problem \eqref{eq:example} is solvable for $c=c^*$ with
$$
\frac{1}{2\sqrt 3} - \frac{5}{8}k < c^* < \frac{1}{\sqrt 3}- \frac{k}{4}.
$$
Put $K:=\{ k \in \mathbb{R}: \mbox{ problem \eqref{eq:example} is
solvable} \}$. The continuous dependence for the differential
equation in \eqref{eq:example} on the parameters $c$ and $k$
implies that $K$ is an open set. Moreover, by classical
comparison-type techniques, as employed in \cite{MathNach} and
\cite{Brno}, applied to the associated first order problem
\begin{gather*}
\dot z=c+kx-\frac{g(x)}{z}\\
z(0^+)=z(1^-)=0
\end{gather*}
one can show that $K$ is a connected set. Consequently, there
exists a threshold value $k^*$, with
$1/(2 \sqrt 3)<k^* \le 4/\sqrt 3$, such that \eqref{eq:example}
is solvable if and only if $k<k^*$.


\begin{thebibliography}{00}


\bibitem{AORW} R. P. Agarwal, D. O'Regan and P. J. Y. Wong.
{\it Positive Solutions of Differential, Difference and Integral Equations}.
Kluwer Academic Publishers, Dordrecht, 1999.

\bibitem{AW} D. G. Aronson and H. F. Weinberger.
{\it Multidimensional nonlinear diffusion arising in population
genetics}. Adv. Math. {\bf 30}  (1978), 33--76.

\bibitem{BNS} H. Berestycki, B. Nicolaenko and B. Scheurer.
{\it Traveling wave solutions to combustion models and their
singular limits}. SIAM J. Math. Anal. {\bf 16}  (1985),
1207--1242.

\bibitem{DP} P. Dr\'abek. {\it Nonlinear eigenvalue problems
for the p-Laplacian in $\mathbb{R}^ N$}. Math. Nachr. {\bf 173} (1995),
131--139.

\bibitem{DR} P. Dr\'abek and C. G. Simader. {\it Nonlinear
eigenvalue problems for quasilinear equations in unbounded
domains. } Math. Nachr. {\bf 203} (1999), 5--30.

\bibitem{F} P. C. Fife.
{\it Mathematical Aspects of Reaction and Diffusing Systems}.
Lecture Notes in Biomathematics {\bf 28}, Springer-Verlag, New
York, 1979.

\bibitem {LLV} G. S. Ladde, V. Lakshmikantham and A. S. Vatsala.
{\it Monotone Iterative Techniques for Nonlinear Differential
Equations}. Pitman, Boston, 1985.

\bibitem{MathNach} L. Malaguti and C. Marcelli.
{\it Travelling wavefronts in reaction-diffusion equations with
convection effects and non-regular terms}. Math.
Nachr. {\bf 242} (2002), 1--17.

\bibitem{Brno} L. Malaguti and C. Marcelli.
{\it Heteroclinic orbits in plane dynamical systems}. Arch. Math.
(Brno) {\bf 38}, (2002), 183--200.

\bibitem{MI} L. Malaguti and C. Marcelli. {\it The influence of convective effects on front
propagation in certain diffusive models}, in ``Mathematical
Modelling \& Computing in Biology and Medicine'', 5th ESMTB
Conference 2002, (V. Capasso ed.), Grenoble 2003, 362--367.

\bibitem {Murray} J. D. Murray. {\it Mathematical Biology}.
Springer--Verlag, Berlin, 1993.

\bibitem{O'R} D. O'Regan. {\it Existence
Theory for Nonlinear Ordinary Differential Equations}. Kluwer
Academic Publishers, Dordrecht, 1997.


\bibitem{S} K. W. Schrader.
{\it Solutions of second order ordinary differential equations}.
J. Diff. Eqs. {\bf 4}  (1968),  510--518.

\bibitem{TR} C. Tretter. {\it Boundary eigenvalue problems for differential
equations $N\eta =\lambda P\eta $ with $\lambda$ polynomial
boundary conditions}. J. Diff. Eqs. {\bf 170}, (2001), 408--471.

\bibitem{VS} V. A. Volpert and Yu. M. Suhov.
{\it Stationary solutions of non-autonomous Kolmogorov-Petrovsky-Piskunov
equation}. Ergod. Th. \& Dynam. Sys. {\bf 19} (1999), 809--835.


\end{thebibliography}

\end{document}
