\documentclass[reqno]{amsart}
\usepackage{hyperref}

\AtBeginDocument{{\noindent\small {\em
Electronic Journal of Differential Equations},
 Vol. 2007(2007), No. 46, pp. 1--7.\newline
ISSN: 1072-6691. URL: http://ejde.math.txstate.edu
or http://ejde.math.unt.edu
\newline ftp ejde.math.txstate.edu  (login: ftp)}
\thanks{\copyright 2007 Texas State University - San Marcos.}
\vspace{9mm}}

\begin{document}
\title[\hfilneg EJDE-2007/46\hfil Convergence behaviour of solutions]
{Convergence behaviour of solutions to delay cellular neural
networks with non-periodic coefficients}

\author[B. Xiao, H. Zhang\hfil EJDE-2007/46\hfilneg]
{Bing Xiao, Hong Zhang}  % in alphabetical order

\address{Bing Xiao \newline
Department of Mathematics, 
Hunan University of Arts and Science,
Changde, Hunan 415000, China} 
\email{bingxiao209@yahoo.com.cn}

\address{Hong Zhang \newline
Department of Mathematics,
Hunan University of Arts and Science,
Changde, Hunan 415000, China}
\email{hongzhang320@yahoo.com.cn}

\thanks{Submitted October 7, 2006. Published March 15, 2007.}
\thanks{Supported   by grants 06C586 and
 06C576 from  Scientific research Fund of Hunan \hfill\break\indent
 Provincial Educational Department of China.}

\subjclass[2000]{34C25, 34K13, 34K25}
\keywords{Cellular neural networks;  convergence; delays}

\begin{abstract}
 In this note we study delay neural networks without
 periodic coefficients. Sufficient conditions are established to ensure
 that all solutions of the networks  converge  to  a  periodic function.
 An example is  given to illustrate our results.
\end{abstract}

\maketitle
\numberwithin{equation}{section}
\newtheorem{theorem}{Theorem}[section]
\newtheorem{lemma}[theorem]{Lemma}
\newtheorem{remark}[theorem]{Remark}


\section{Introduction}

Let $n$ be the number of units in a neural network,
$x_{i}(t)$ be the state vector of the $i$-th unit at time $t$,
$a_{ij}(t)$ be the strength of the $j$-th unit
    on  the $i$-th unit at time $t$, $b_{ij}(t)$ be the strength of the
$j$-th unit     on  the $i$-th unit at time $t-\tau_{ij}(t)$, and
$\tau_{ij}(t)\geq 0$ denote  the   transmission delay of the $i$-th unit
along the axon of the $j$-th unit at the time $t$. It is well known
that the delayed cellular neural networks  are described by the
 differential equations
\begin{equation}
    x_{i}'(t)=-c_{i}(t)x_{i}(t)+\sum^n_{j=1}a_{ij}(t)f_{j}
(x_{j}(t))+\sum^n_{j=1}b_{ij}(t)
    g_{j}(x_{j}(t-\tau_{ij}(t)))+I_{i}(t),    \label{e1.1}
\end{equation}
for $i=1, 2, \dots,  n$,
for any activation functions of signal transmission $f_{j}$ and $ g_{j}$.
Here $I_{i}(t)$ denotes the external bias on the $i$-th unit at the time
$t$, $c_{i}(t)$ represents the rate with which the $i$-th unit will
reset its potential to the resting state in isolation when
disconnected from the network and external inputs at time $t$.

Since the cellular neural networks (CNNs)  were introduced by Chua
and Yang \cite{c1} in 1990,  they have been successfully applied to signal
and image processing, pattern recognition and optimization. Hence,
CNNs have been the object of intensive analysis by numerous
  authors in recent years. In particular, extensive results on the
    problem of the existence and stability of  periodic solutions for
    system \eqref{e1.1} are given out
    in many literatures. We refer the reader to \cite{c2,d1,h1,l1,l2,l3,y1}
 and the  references cited  therein.
Suppose that the following condition holds:
\begin{itemize}
\item[(H0)] $c_{i}, I_{i}, a_{ij}, b_{ij}:\mathbb{R}\to\mathbb{R}$
    are continuous periodic functions, where  $i, j=1,  2,   \dots,  n$.
\end{itemize}

Under this assumption, most of the references above show
that  all solutions of  system \eqref{e1.1} converge  to  a
periodic function.   However, to the best of
our knowledge, few authors have considered
the convergence behavior for solutions of \eqref{e1.1}
without assuming (H0). Thus, it is worthwhile
to consider the convergence behavior
for solutions of \eqref{e1.1} in this case.

The main purpose of this paper is to give the  new criteria   for
the convergence behavior for all solutions of \eqref{e1.1}. By
applying mathematical analysis techniques, without assuming
(H0),  we derive some sufficient conditions ensuring that all
solutions of system \eqref{e1.1} converge   to  a
    periodic function. These results  are new and
complement previously known results.
An example is  provided to illustrate our results.

 Consider the  delay cellular neural networks
\begin{equation}
    x_{i}'(t)=-c^{*}_{i}(t)x_{i}(t)+\sum^n_{j=1}a^{*}_{ij}(t)
f_{j}(x_{j}(t))+\sum^n_{j=1}b^{*}_{ij}(t)
    g_{j}(x_{j}(t-\tau_{ij}(t)))+I^{*}_{i}(t),
         \label{e1.2}
\end{equation}
where $i=1,  2,  \dots,  n$.
For the rest of this paper, for $i,  j=1,  2, \dots,  n$,
  it will be assumed that $c^{*}_{i},
    I^{*}_{i},   a^{*}_{ij},   b^{*}_{ij}, \tau_{ij}:\mathbb{R}\to\mathbb{R}$
are continuous $\omega$-periodic functions.
     Then, we can choose a constant  $\tau $
    such that
\begin{equation}
 \tau=\max_{1\leq i,j\leq n}\big\{\max_{t\in [0,  \omega]}\tau_{ij}(t)\big\}.
    \label{e1.3}
\end{equation}
    We also use  the following conditions:
\begin{itemize}
\item[(H1)]  For each $j\in\{1,  2,  \dots,  n \}$,
 there exist nonnegative constants  $\tilde{L}_{j}$ and $L_{j}$
 such that
\begin{equation}
   |f_{j}(u )-f_{j}(v )|\leq \tilde{L}_{j}|u -v |, \quad
   |g_{j}(u )-g_{j}(v )| \leq L_{j}|u -v |,\quad \forall
     u ,v \in \mathbb{R}. \label{e1.4}
\end{equation}
\item[(H2)]  There exist constants $\eta>0$, $\lambda>0$ and
$\xi_{i}>0$, $i=1,  2,  \dots,  n$, such that for all $t>0$
and $i=1, 2, \dots, n$,
$$
  -[c^{*}_{i}(t) -\lambda]\xi_{i}+
\sum_{j=1}^{n}|a^{*}_{ij}(t)|\tilde{L}_{j}\xi_{j}+
\sum_{j=1}^{n}|b^{*}_{ij}(t) |e^{\lambda \tau}
L_{j}\xi_{j}<-\eta<0,
$$
\item[(H3)]  For $i, j=1,  2,  \dots,  n$,
$c _{i},     I _{i},   a _{ij},   b _{ij}  :\mathbb{R}\to\mathbb{R}$
are  continuous     functions, and
\begin{gather*}
\lim_{t\to +\infty}(c_{i}(t)-c^{*}_{i}(t))=0, \quad
\lim_{t\to +\infty}(I_{i}(t)-I^{*}_{i}(t))=0,\\
\lim_{t\to +\infty}(a_{ij}(t)-a^{*}_{ij}(t))=0, \quad
\lim_{t\to +\infty}(b_{ij}(t)-b^{*}_{ij}(t))=0.
\end{gather*}
\end{itemize}

The following lemma  will be useful to prove our main results in
    Section 2.

\begin{lemma}[\cite{l2}] \label{lem1.1}
Let   (H1) and (H2) hold. Then system \eqref{e1.2}  has  exactly one
$\omega$-periodic  solution.
\end{lemma}

As usual, we introduce the phase space $ C([-\tau, 0]; \mathbb{R}^n )$
as a Banach space of continuous mappings from $[-\tau, 0]$ to
$\mathbb{R}^n$ equipped with the supremum norm,
$$
\|\varphi\|=\max_{1\leq i\leq n}\sup_{-\tau\leq
t\leq0}|\varphi_{i}(t)|
$$
for  all $\varphi=(\varphi_{1}(t), \varphi_{2}(t), \dots,
\varphi_{n}(t))^{T}\in C([-\tau, 0]; \mathbb{R}^n )$.

The initial conditions associated with system
\eqref{e1.1} are of the form
\begin{equation}
x_{i}(s)=\varphi_{i}(s),\quad
s\in [-\tau, 0], \quad  i=1,2,\dots,n, \label{e1.5}
\end{equation}
where $\varphi=(\varphi_{1}(t), \varphi_{2}(t), \dots,
\varphi_{n}(t))^{T}\in  C([-\tau, 0]; \mathbb{R}^n)$.

For $ Z(t)=(x_{1}(t), x_{2}(t),\dots,x_{n}(t))^{T} $, we define the
norm
$$
\|Z(t)\|_{\xi}=\max_{i=1,2,\dots,n}|\xi^{-1}_{i}x_{i}(t)|.
$$


The remaining part of this paper is organized as follows. In Section
2, we present some new sufficient conditions to ensure  that all
solutions of system \eqref{e1.1}  converge  to  a
    periodic function.   In Section 3, we give an example and
remarks to illustrate our results obtained in the previous sections.

\section{Main Results}

\begin{theorem} \label{thm2.1}
Assume (H1)--(H3) and that $Z^{*}(t)=(x^{*}_{1}(t),
x^{*}_{2}(t),\dots,x^{*}_{n}(t))^{T}$ is the $\omega$-periodic
solution of \eqref{e1.2}.  Then every solution
$$
 Z(t)=(x_{1}(t), x_{2}(t),\dots,x_{n}(t))^{T}
$$
of  \eqref{e1.1} with  initial value
$ \varphi=(\varphi_{1}(t), \varphi_{2}(t), \dots,
\varphi_{n}(t))^{T} \in  C([-\tau, 0]; \mathbb{R}^n )$, satisfies
$$
\lim_{t\to +\infty}|x_{ i}(t)-x^{*}_{i }(t)|=0,
  \quad i= 1,  2, \dots,  n.
$$
\end{theorem}

\begin{proof} Set
\begin{align*}
\delta_{i}(t)&=-[c_{i}(t)-c^{*}_{i}(t)]x^{*}_{i}(t)
  +\sum^n_{j=1}[a_{ij}(t)-a^{*}_{ij}(t)]f_{j}(x^{*}_{j}(t))\\
&\quad  +\sum^n_{j=1}[b_{ij}(t)-b^{*}_{ij}(t)]g_{j}(x^{*}_{j}
(t-\tau_{ij}(t)))+[I_{i}(t)-I^{*}_{i}(t)],
\end{align*}
 where $i= 1, 2,  \dots, n$. Since $Z^{*}(t)=(x^{*}_{1}(t),
x^{*}_{2}(t),\dots,x^{*}_{n}(t))^{T}$ is $\omega$-periodic,
together with (H2) and (H3), then
for all $\epsilon >0$, we can choose a sufficiently large constant
$T>0$ such that
\begin{equation}
| \delta_{i}(t) |<\frac{1}{4}\eta \epsilon, \quad  \mbox{ for  all }
t\geq T,
 \label{e2.1}
\end{equation}
and
\begin{equation}
  -[c _{i}(t) -\lambda]\xi_{i}+
\sum_{j=1}^{n}|a _{ij}(t)|\tilde{L}_{j}\xi_{j}+
\sum_{j=1}^{n}|b _{ij}(t) |e^{\lambda \tau}
L_{j}\xi_{j}<-\frac{1}{2}\eta<0,    \label{e2.2}
\end{equation}
 for  all  $t\geq T$,  $i=1,  2,  \dots,  n$.
 Let $ Z(t)=(x_{1}(t), x_{2}(t),\dots,x_{n}(t))^{T} $ be a solution of
 \eqref{e1.1} with  initial value
$ \varphi=(\varphi_{1}(t),
\varphi_{2}(t), \dots, \varphi_{n}(t))^{T} \in  C([-\tau,0]; \mathbb{R}^n )$.
Define
$$
u(t)=(u_{1}(t), u_{2}(t),\dots,u_{n}(t))^{T}=Z(t)-Z^{*}(t).
$$
Then for $i=1, 2, \dots, n$,
\begin{equation} \label{e2.3}
\begin{aligned}
u_{i}'(t)&=-c_{i}(t)u_{i}(t)+\sum^n_{j=1}a_{ij}(t)[f_{j}(x_{j}(t))
   -f_{j}(x^{*}_{j}(t))]   \\
&\quad + \sum^n_{j=1}b_{ij}(t)[ g_{j}(x_{j}(t-\tau_{ij}(t)))
  -g_{j}(x^{*}_{j}(t-\tau_{ij}(t)))] +\delta_{i}(t)\,.
\end{aligned}
\end{equation}
 Let $i_{t}$ be an index such that
\begin{equation}
\xi^{-1}_{i_{t}}|u_{i_{t}}(t)|= \|u (t)\|_{\xi}. \label{e2.4}
\end{equation}
Calculating the upper right derivative of
$ e^{\lambda s}|u_{i_{s}}(s)|$ along   \eqref{e2.3}, in view of
\eqref{e2.1} and (H1), we have
\begin{equation}
\begin{aligned}
&D^+(e^{\lambda s}|u_{i_{s}}(s)|)\Big|_{s=t}\\
& =  \lambda e^{\lambda t}|u_{i_{t}}(t)|
    +e^{\lambda t}\mathop{\rm sign} (u_{i_{t}}(t))\Big\{-c_{i_{t}}(t)u_{i_{t}}(t)
    +\sum^n_{j=1}a_{i_{t}j}(t)[f_{j}(x_{j}(t))-f_{j}(x^{*}_{j}(t))]
    \\
& \quad +\sum^n_{j=1}b_{i_{t}j}(t)[    g_{j}(x_{j}(t-\tau_{i_{t}j}(t)))
  -g_{j}(x^{*}_{j}(t-\tau_{i_{t}j}(t)))]+\delta_{i_{t}}(t)\Big \}\\
& \leq   e^{\lambda t}\Big\{-[c_{i_{t}}(t)
-\lambda]|u_{i_{t}}(t)|\xi^{-1}_{i_{t}}\xi_{i_{t}} +
\sum_{j=1}^{n}a_{i_{t}j}(t )  \tilde{L}_{j}|u_{j  }(t )|\xi^{-1}_{j}\xi_{j}\\
&\quad  +\sum_{j=1}^{n} b_{i_{t}j}(t) L_{j}|u_{j}(t-\tau_{i_{t}j}(t))|
\xi^{-1}_{j }\xi_{j} \Big\}+
\frac{1}{4}\eta\epsilon e^{\lambda t}.    \label{e2.5}
\end{aligned}
\end{equation}
Let
\begin{equation}
M(t)=\max_{-\tau \leq s\leq t} \{e^{\lambda s}\|u (s)\|_{\xi}\}.
\label{e2.6}
\end{equation}
It is obvious that $e^{\lambda t}\|u (t)\|_{\xi}\leq M(t)$, and
$M(t)$ is non-decreasing.  Now, we  consider two
cases.

 \noindent  \textbf{Case (i).}  Suppose that
\begin{equation}
M(t)>  e^{\lambda t}\|u (t)\|_{\xi} \quad \mbox{  for  all  } t\geq T.
\label{e2.7}
\end{equation}
   Then, we claim that
\begin{equation}
M(t)\equiv M(T) \label{e2.8}
\end{equation}
which is  a   constant  for  all $ t\geq T$.
 By way of contradiction, assume that \eqref{e2.8} does not hold.
Consequently, there    exists
 $t_{1}>T$ such that $M(t_{1})> M(T)$. Since
$$
e^{\lambda t}\|u (t)\|_{\xi}\leq M(T) \quad
    \mbox{ for  all  } -\tau \leq t\leq T,
$$
there must exist $\beta \in (T,  t_{1})$ such that
$$
e^{\lambda \beta}\|u (\beta)\|_{\xi}= M(t_{1})\geq  M(\beta),
$$
which contradicts  \eqref{e2.7}. This contradiction implies  \eqref{e2.8}.
It follows that there exists $t_{2}>T$ such that
\begin{equation}
\|u (t)\|_{\xi} < e^{-\lambda t}M(t)= e^{-\lambda t}M(T)<\epsilon
\quad\mbox{  for  all  } t\geq t_{2}. \label{e2.9}
\end{equation}

\noindent   \textbf{Case (ii).}
 If there is a point $t_{0}\geq T$ such that
$M(t_{0})=  e^{\lambda t_{0}}\|u (t_{0})\|_{\xi}$, then, using
 \eqref{e2.1}, \eqref{e2.2} and \eqref{e2.5}, we get
\begin{align*}
&D^+(e^{\lambda s}|u_{i_{s}}(s)|)\Big|_{s=t_{0}}\\
& \leq \Big\{-[c_{i_{t_{0}}}(t_{0}) -\lambda]e^{\lambda
t_{0}}|u_{i_{t_{0}}}(t_{0})|\xi^{-1}_{i_{t_{0}}}\xi_{i_{t_{0}}} +
\sum_{j=1}^{n}a_{i_{t_{0}}j}(t_{0} )
 \tilde{L}_{j}e^{\lambda t_{0} }|u_{j}(t_{0} )|\xi^{-1}_{j } \xi_{j}\\
& \quad   +\sum_{j=1}^{n} b_{i_{t_{0}}j}(t_{0})  L_{j}e^{\lambda
(t_{0}-\tau_{i_{t_{0}}j}(t_{0}))}|u_{j
}(t_{0}-\tau_{i_{t_{0}}j}(t_{0}))| \xi^{-1}_{j }e^{\lambda
 \tau_{i_{t_{0}}j}(t_{0}) }\xi_{j} \Big\}+
\frac{1}{4}\eta\epsilon e^{\lambda t_{0}}\\
 & \leq
\Big\{-[c_{i_{t_{0}}}(t_{0}) -\lambda] \xi_{i_{t_{0}}} +
\sum_{j=1}^{n}a_{i_{t_{0}}j}(t_{0} )
 \tilde{L}_{j}  \xi_{j} +\sum_{j=1}^{n} b_{i_{t_{0}}j}(t_{0}) e^{\lambda
 \tau  } L_{j} \xi_{j} \Big\}M(t_{0})+
\frac{1}{4}\eta\epsilon e^{\lambda t_{0}}\\
& < -\frac{1}{2}\eta M(t_{0})+
 \frac{1}{2}\eta\epsilon e^{\lambda t_{0}}.
\end{align*} %\label{e2.10}
In addition, if $M(t_{0})\geq  \epsilon e^{\lambda t_{0}}$,
 then $M(t )$ is strictly decreasing in a small neighborhood
$(t_{0},  t_{0}+\delta_{0})$.  This contradicts that $M(t)$ is
non-decreasing.
   Hence,
\begin{equation}
e^{\lambda t_{0}}\|u (t_{0})\|_{\xi}=M(t_{0})<  \epsilon e^{\lambda t_{0}},
  \quad \mbox{and}\quad \|u (t_{0})\|_{\xi} <  \epsilon. \label{e2.11}
\end{equation}
   Furthermore, for any $t>t_{0}$,   by the same approach
   used in the  proof of \eqref{e2.11}, we have
$$
e^{\lambda t }\|u (t )\|_{\xi} <  \epsilon e^{\lambda t }, \quad
   \mbox{and}\quad
 \|u (t )\|_{\xi} <  \epsilon , \quad \mbox{if}\quad
   M(t )=  e^{\lambda t }\|u (t )\|_{\xi}. %{e2.12}
$$
 On the other hand, if $M(t )>  e^{\lambda t }\|u (t )\|_{\xi}$
for some $t>t_{0}$, then we can choose $t_{0}\leq t_{3}<t$ such that
$$
M(t_{3} )=  e^{\lambda t_{3}}\|u (t_{3} )\|_{\xi},  \quad
\|u (t_{3})\|_{\xi} <  \epsilon ,\quad
M(s)>  e^{\lambda s }\|u (s )\|_{\xi} \quad \mbox{for  all }
 s\in (t_{3},  t].
$$
Using  a similar argument as in the proof
    of  \textbf{Case (i)}, we can show  that
$$
M(s)\equiv M(t_{3})  %{e21.3}
$$
which is a  constant for  all $ s\in ( t_{3},t]$.
This implies that
$$
\|u (t )\|_{\xi} <  e^{-\lambda t }M(t)
  =  e^{-\lambda t }M(t_{3})=\|u (t_{3})\|_{\xi} e^{-\lambda (t-t_{3}) }
<\epsilon.
$$
In summary, there must exist $N>0$ such that $\|u (t )\|_{\xi}\leq \epsilon$
holds for all $t>N$. This completes the proof of Theorem \ref{thm2.1}.
\end{proof}

\begin{remark} \label{rmk2.1} \rm
 Without assumption (H3), we suppose that
\begin{itemize}
\item[(H1*)] For each $j\in\{1,  2,\dots,  n \}$,   there exist
nonnegative constants $L_{j}$ and  $\tilde{L}_{j}$  such that
$$
   |f_{j}(u )|\leq \tilde{L}_{j}|u |,   \quad
   |g_{j}(u )| \leq L_{j}|u  |, \quad \mbox{for  all }
     u  \in \mathbb{R} %\eqref{e1.4}
$$
\item[(H2*)] There exist constants $K>0$, $\eta>0$, $\lambda>0$ and
$\xi_{i}>0$, $i=1, 2,\dots,  n$, such that for all $t>K$,
there  holds
$$
  -[c_{i}(t) -\lambda]\xi_{i}+
\sum_{j=1}^{n}|a_{ij}(t)|\tilde{L}_{j}\xi_{j}+
\sum_{j=1}^{n}|b_{ij}(t) |e^{\lambda \tau}
L_{j}\xi_{j}<-\eta<0,
$$
for $i=1, 2,\dots,  n$.
\end{itemize}
 Moreover, assume that $I_{i}(t)$ converges to zero as $t\to +\infty$.
 Then, applying the similar mathematical analysis techniques in this paper,
we find that the sufficient  conditions can be  established to ensure
the  convergence to zero of all solutions of  \eqref{e1.1}.
\end{remark}

 \begin{remark} \label{rmk2.2} \rm
 If the original system has asymptotically periodic
coefficients,  there  may not be sufficient conditions ensuring
the existence  and the convergence of periodic solutions, which is
the case in the example $x '(t)= -x (t)+\frac{1}{ t^{ 2}+1 }  \sin ^{2}t$.
\end{remark}

\section{An Example}

In this section, we illustrate the results obtained in previous sections.
Consider the  cellular neural networks  with    time-varying
 delays:
\begin{equation} \label{e3.1}
\begin{gathered}
\begin{aligned}
 x_{1}'(t)&=- (1-\frac{2}{1+|t|})x_{1}(t)
 +(\frac{1}{4}+\frac{t}{1+t^{2}} )f_{1}(x_{1}(t ))
 +(\frac{1}{36}+\frac{2t}{1+t^{2}})  f_{2}(x_{2}(t )) \\
 &\quad +(\frac{1}{4}+\frac{t}{2+t^{2}} ) g_{1}(x_{1}(t-\sin ^{2}t))
+(\frac{1}{36}+\frac{4t}{1+t^{2}} )
g_{2}(x_{2}(t-2\sin ^{2}t))\\
&\quad +(\cos t +\frac{t}{1+t^{2}} ),
\end{aligned}
\\
\begin{aligned}
 x_{2}'(t)&=-(1-\frac{4}{1+2|t|})x_{2}(t)
 +(1+\frac{t}{1+t^{2}} )f_{1}(x_{1}(t ))
 +(\frac{1}{4} +\frac{5t}{1+t^{2}})f_{2}(x_{2}(t ))\\
&\quad +  (1+\frac{t}{1+6t^{2}} )g_{1}(x_{1}(t-5\sin ^{2}t))
 +(\frac{1}{4}+\frac{t}{8+t^{2}} )
g_{2}(x_{2}(t-\sin ^{4}t))\\
&\quad +(\sin t +\frac{t}{1+t^{6}}),
\end{aligned}
\end{gathered}
\end{equation}
where $f_{1}(x)=f_{2}(x)=g_{1}(x)=g_{2}(x)=\arctan x$.

Noting the following cellular neural networks
\begin{equation} \label{e3.2}
\begin{gathered}
\begin{aligned}
 x_{1}'(t)&=-  x_{1}(t) + \frac{1}{4} f_{1}(x_{1}(t ))
 + \frac{1}{36}
 f_{2}(x_{2}(t )) + \frac{1}{4}  g_{1}(x_{1}(t-\sin ^{2}t)) \\
&\quad + \frac{1}{36}  g_{2}(x_{2}(t-2\sin ^{2}t))+ \cos t  ,
\end{aligned}
  \\
\begin{aligned}
 x_{2}'(t)&=- x_{2}(t) + f_{1}(x_{1}(t )) + \frac{1}{4} f_{2}(x_{2}(t ))+
   g_{1}(x_{1}(t-5\sin ^{2}t))\\
&\quad + \frac{1}{4}g_{2}(x_{2}(t-\sin ^{4}t)) + \sin t ,
\end{aligned}
\end{gathered}
\end{equation}
where
\begin{gather*}
 c^{*}_{1}(t)=c^{*}_{2}(t)= L _{1}=L _{2}=\tilde{L}_{1}=\tilde{L} _{2}=1, \\
 a^{*}_{11}(t) = b^{*}_{11} (t)=\frac{1}{4},  \quad
 a^{*}_{12}(t) = b^{*}_{12}(t) = \frac{1}{36},\\
 a^{*}_{21}(t) = b^{*}_{21}(t) =1,  \quad
 a^{*}_{22} (t)= b^{*}_{22}(t) =\frac{1}{4},  \quad
 \tau=5 .
\end{gather*}
Then
\begin{gather*}
d_{ij}=\frac{1}{c^{*}_{i}(t)}  ( a^{*}_{ij}(t)\tilde{L}_j
+ b^{*}_{ij}(t) L_j), \quad i,j=1,2, \\
D =(d_{ij})_{2\times 2}= \begin{pmatrix}
               1/2&  1/18 \\
               2& 1/2 \end{pmatrix}.
\end{gather*}
Hence,   $\rho(D )=5/6<1$. It follows
from the theory of $M$-matrices in \cite{b1} that there exist constants
$\bar{\eta}>0 $ and  $\xi_{i}>0$, $i=1, 2$, such that for all
$t>0$, there     holds
$$
  - c^{*}_{i}(t)  \xi_{i}+
\sum_{j=1}^{n}|a^{*}_{ij}(t)|\tilde{L}_{j}\xi_{j}+
\sum_{j=1}^{n}|b^{*}_{ij}(t) |  L_{j}\xi_{j}<-\bar{\eta}<0, \quad
 i=1,2.
$$
Then, we can choose constants $\eta>0$ and  $0<\lambda<1 $  such that
$$
  -[c^{*}_{i}(t) -\lambda]\xi_{i}+
\sum_{j=1}^{n}|a^{*}_{ij}(t)|\tilde{L}_{j}\xi_{j}+
\sum_{j=1}^{n}|b^{*}_{ij}(t) |e^{\lambda \tau} L_{j}\xi_{j}<-\eta<0,
\quad i=1,  2,\; \forall t>0,
$$
which implies that systems \eqref{e3.1} and  \eqref{e3.2} satisfy
(H1)--(H3).
Hence,  from Lemma \ref{lem1.1} and Theorem \ref{thm2.1},  system \eqref{e3.2} has exactly
one  $2\pi$-periodic solution. Moreover, all
solutions of  \eqref{e3.1} converge to  the
    periodic solution of \eqref{e3.2}.

\begin{remark} \label{rmk3.1} \rm
 Since \eqref{e3.1} is a delayed neural
network without periodic coefficients, the results in
\cite{c2,d1,h1,l1,l2,l3,y1} and the  references therein can
not be applied to prove that all
solutions  converge  to  a  periodic function.
This implies that the results of this paper are essentially new
and they complement previously known results.
\end{remark}

\begin{thebibliography}{0}

\bibitem{b1}  A. Berman and R. J. Plemmons;
\emph{Nonnegative Matrices in the Mathematical Science},
 Academic Press, New York, 1979.

\bibitem{c1} L. O. Chua, T. Roska;
\emph{Cellular neural networks with nonlinear and delay-type template elements},
in: Proc. 1990 IEEE Int. Workshop on Cellular Neural Networks and
Their Applications, 1990, pp. 12-25.

\bibitem{c2}  J. Cao;
\emph{New results concerning exponential stability and periodic solutions
of delayed cellular neural networks with delays},
     Physics Letters A 307 (2003), 136-147.

\bibitem{h1}  H. Huang, J. Cao and J. Wang;
\emph{Global exponential stability and periodic solutions of recurrent
     cellular neural networks with delays},
     Physics Letters A 298 (5-6) (2002), 393-404.

\bibitem{d1}  Q. Dong, K. Matsui and X. Huang;
\emph{Existence and stability of periodic solutions
     for Hopfield neural network equations with periodic input},
Nonlinear Analysis 49 (2002), 471-479.

\bibitem{l1} Z. Liu and L. Liao;
\emph{Existence and global exponential stability
    of periodic solutions of cellular neural
    networks with time-vary delays}, Journal of Mathematical Analysis And
    Applications, 290 (2) (2004), 247-262.

\bibitem{l2}  B. Liu and L. Huang;
\emph{Existence and exponential stability of periodic solutions for
cellular neural networks with time-varying delays}, Physics
Letters A, 349  (2006), 474-483.

\bibitem{l3} W. Lu and T. Chen;
\emph{On periodic Dynamical systems},
Chinese Annals of Mathematics,   B(25)  (2004) 455-462.

\bibitem{y1} K. Yuan, J. Cao and J. Deng;
\emph{Exponential stability and periodic solutions of fuzzy cellular
neural networks with time-varying delays}, Neurocomputing, 69 (2006),
1619-1627.

\end{thebibliography}

\end{document}
