\subsection{Smoothing}
In this section, we will present the smoothing results for the linear signal
model. The problem objective remains the same as that of Sec.~2.4, i.e.\ to obtain
a smoothed estimate based on the filtered estimate $\hat X_T = (\hat x_1, \ldots, \hat x_T)$ obtained from a fixed set of data $Y_T$ and the backward recursive
process (also known as the adjoint process in the risk-sensitive control problem in~\cite{EMoore}) to be defined shortly. We need the following definitions in our
subsequent analysis.
\begin{definition}
Define the unnormalized density of the smoothed estimate $\gamma_{k,T}(x)$
such that
\[
\gamma_{k,T}(x) = \hat E [ \bar \Lambda_T \bar \Gamma_T \theta \exp (\theta
\hat \Psi_{0,T}) I(x_k \in dx) \; | \; \mathcal{Y}_T, \hat X_T ] \]
and
the unnormalized density of the backward recursive process $\beta_{k,T}(x)$ as
\[
\beta_{k,T}(x) = \hat E [ \bar \Lambda_{k,T} \bar \Gamma_{k+1, T} \exp (\theta
\hat \Psi_{k,T})\; | \; x_k = x, \mathcal{Y}_T, \hat X_{k}^T ] \]
where $\hat X_{k}^T$ and $\bar \Lambda_{k,T}$ are as defined in Sec.~2.4 and
$\bar \Gamma_{m,n} = \prod_{k=m}^n \bar \gamma_k$.
\label{def:bglin} \end{definition}
With these definitions, it can easily be proved (quite similarly to the proofs
done in Sec.~2.4) that the following Lemma and Theorem hold.
\begin{lemma}
The process $\beta_{k,T}(x)$ satisfies the following backward recursion
\beq
\beta_{k,T}(x) = \frac {\phi_k (y_k - C_kx)}{\phi_k(y_k)} \exp (\frac {\theta}
{2} (x - \hat x_k)^{\p} Q_k (x - \hat x_k)) \int_{\reals^n} \psi_{k+1}(\xi -
A_kx) \beta_{k+1, T}(\xi) d\xi \label{eq:recbetalin} \eeq
with \[ \beta_{T,T}(x) = \frac {\phi_T (y_T - C_Tx)}{\phi_T(y_T)}
\exp (\frac {\theta}{2} (x - \hat x_T)^{\p} Q_T (x - \hat x_T)) \]
\label{blemmalin} \end{lemma}
\begin{theorem}
The unnormalized density function of the smoothed estimate, $\gamma_{k,T}(x)$,
can be expressed as
\beq
\gamma_{k,T}(x) = \alpha_k(x) \beta_{k,T}(x) \label{eq:gammalin} \eeq
\end{theorem}
So far, we have not actually used the fact that $x_k, y_k$ are Gaussian distributed or the linear properties of the signal model. If we exploit these facts,
we arrive at the following two theorems, which show that both $\beta_{k,T}(x)$ and $\gamma_{k,T}(x)$ can be expressed as unnormalized Gaussian densities.
\begin{theorem}
The backward recursive process $\beta_{k,T}(x)$ is an unnormalized Gaussian
density given by
\beq
\beta_{k,T}(x) = \beta_k(x, \rho_k) =
K_k \exp (-\frac{1}{2} (x - \eta_k)^{\p} P_k^{-1} (x - \eta_k)) \label{eq:betagauss} \eeq
where $\rho_k = (\eta_k, P_k, K_k)$ and $P_k^{-1}\eta_k, P_k^{-1}, K_k$ are
given by the following backward algebraic recursions:
\beqa
P_k^{-1}\eta_k = A_k^{\p} W_{k+1}^{-1}S_kP_{k+1}^{-1} \eta_{k+1} + C_k^{\p}
V_k^{-1} y_k - \theta Q_k \hat x_k \\
P_k^{-1} = A_k^{\p} (W_{k+1}^{-1} - W_{k+1}^{-1}S_kW_{k+1}^{-1})A_k + C_k^{\p}
V_k^{-1}C_k - \theta Q_k \\
K_k = K_{k+1} \left| W_{k+1} \right|^{-\frac {1}{2}} \left| S_k
\right|^{\frac {1}{2}} N_k(P_{k+1}^{-1}\eta_{k+1}, \hat x_k, y_k)
\eeqa
where $S_k = (W_{k+1}^{-1} + P_{k+1}^{-1})^{-1}$ and $N_k(P_{k+1}^{-1}\eta_{k+1}, \hat x_k, y_k)$ is an exponential of a quadratic form in its arguments.
\label{betatheolin} \end{theorem}
\begin{remark}
We assume here that $A_k^{\p} (W_{k+1}^{-1} - W_{k+1}^{-1}S_kW_{k+1}^{-1})A_k + C_k^{\p}
V_k^{-1}C_k - \theta Q_k > 0$ such that $P_k$ exists. Also, we do not provide
the exact form of $N_k$ because it does not contribute to the solution of the
problem.
\end{remark}
\begin{proof}
Similar to the proof of Theorem~\ref{lintheo}.
\end{proof}
\begin{theorem}
The unnormalized density of the smoothed estimate, $\gamma_{k,T}(x)$, can be expressed as a
Gaussian density $\bar K_k \exp (-\frac{1}{2} (x - \bar \eta_k)^{\p} {\bar P_k}^{-1} (x - \bar \eta_k))$ where $\bar \eta_k$ and $\bar P_k$ are given by
\beqa
\bar \eta_k = \bar P_k (P_k^{-1}\eta_k + R_k^{-1} \mu_k) \\
\bar P_k = (P_k^{-1} + R_k^{-1})^{-1} \label{eq:smlinesti} \eeqa
\end{theorem}
\begin{proof}
Using the Gaussian form of $\alpha_k(x)$ and $\beta_{k, T}(x)$, it is not hard
to see (by using \eqref{eq:gammalin}) that $\gamma_{k,T}(x)$ can be written in
a Gaussian form as well, and we can obtain \eqref{eq:smlinesti} by completing
the square. The existence of $\bar P_k$ is guaranteed by the existence of
$P_k$ and $R_k$.
\end{proof}