\chapter{Elements of Linear Algebra}
\label{ch:elements-of-linear-algebra}
\begin{remark}[vector]
  Let $u$ be a vector; we will use the notations $u$ and $\vec{u}$ interchangeably.
\end{remark}
Let $u = \begin{pmatrix}
  u_1 \\
  \vdots \\
  u_n
\end{pmatrix}$ and $v = \begin{pmatrix}
  v_1 \\
  \vdots \\
  v_n
\end{pmatrix}$
\begin{definition}[Scalar Product (Dot Product)]
  \begin{align*}
    \scalar{u, v} & = \begin{pmatrix}
      u_1, \ldots, u_n
    \end{pmatrix}
    \begin{pmatrix}
      v_1 \\
      \vdots \\
      v_n
    \end{pmatrix} \\
                  & = u_1 v_1 + u_2 v_2 + \ldots + u_n v_n
  \end{align*}

  We may use the $\scalar{u, v}$ or $u \cdot v$ notations.
\end{definition}
\paragraph{Dot product properties}
\begin{itemize}
  \item $\scalar{u, v} = \scalar{v, u}$
  \item $\scalar{(u+v), w} = \scalar{u, w} + \scalar{v, w}$
  \item $\scalar{u, u} = \norm{u}^2$ % NOTE(review): original item was left incomplete ("$\scalar{u, v}$"); confirm the intended property
  \item $\scalar{\vec{u}, \vec{v}} = \norm{\vec{u}} \times \norm{\vec{v}} \times \cos(\widehat{\vec{u}, \vec{v}})$
\end{itemize}
\begin{definition}[Norm]
  Length of the vector.
  \[
    \norm{u} = \sqrt{\scalar{u, u}}
  \]

  $\norm{u} \geq 0$
\end{definition}
\begin{definition}[Distance]
  \[
    \operatorname{dist}(u, v) = \norm{u-v}
  \]
\end{definition}
\begin{definition}[Orthogonality]
  % TODO(review): definition body was left empty in the source notes.
\end{definition}
\begin{remark}
  \[
    (\operatorname{dist}(u, v))^2 = \norm{u - v}^2 = \scalar{v-u, v-u}.
  \]
\end{remark}
\begin{figure}
  \centering
  \includestandalone{figures/schemes/vector_orthogonality}
  \caption{Scalar product of two orthogonal vectors.}
  \label{fig:scheme-orthogonal-scalar-product}
\end{figure}
\begin{align*}
  \scalar{v-u, v-u} & = \scalar{v, v} + \scalar{u, u} - 2 \scalar{u, v} \\
                    & = \norm{v}^2 + \norm{u}^2 - 2 \scalar{u, v}
\end{align*}

\begin{align*}
  \norm{u - v}^2 & = \norm{u}^2 + \norm{v}^2 - 2 \scalar{u,v} \\
  \norm{u + v}^2 & = \norm{u}^2 + \norm{v}^2 + 2 \scalar{u,v}
\end{align*}
\begin{proposition}[Scalar product of orthogonal vectors]
  \[
    u \perp v \Leftrightarrow \scalar{u, v} = 0
  \]
\end{proposition}
\begin{proof}[Indeed]
  $u \perp v \Leftrightarrow \norm{u-v}^2 = \norm{u+v}^2$, as illustrated in \autoref{fig:scheme-orthogonal-scalar-product}.
  \begin{align*}
    \Leftrightarrow & \; -2 \scalar{u, v} = 2 \scalar{u, v} \\
    \Leftrightarrow & \; 4 \scalar{u, v} = 0 \\
    \Leftrightarrow & \; \scalar{u, v} = 0
  \end{align*}
\end{proof}
\begin{theorem}[Pythagorean theorem]
  If $u \perp v$, then $\norm{u+v}^2 = \norm{u}^2 + \norm{v}^2$.
\end{theorem}
\begin{definition}[Orthogonal Projection]
  % TODO(review): definition body was left empty in the source notes.
\end{definition}
Let $y = \begin{pmatrix}
  y_1 \\
  \vdots \\
  y_n
\end{pmatrix} \in \RR[n]$ and $w$ a subspace of $\RR[n]$.

% NOTE(review): original wrote "$\mathcal{Y}$ can be written as"; the decomposition below is of $y$ itself — confirm against the lecture.
$y$ can be written as the sum of its orthogonal projection on $w$ and a remainder $z$:
\[
  y = \operatorname{proj}^w(y) + z,
\]
where
\[
  \begin{cases}
    z \in w^\perp \\
    \operatorname{proj}^w(y) \in w
  \end{cases}
\]
This decomposition is unique: there is only one such pair $(\operatorname{proj}^w(y), z)$. % NOTE(review): original sentence was unfinished ("There is only one vector $\mathcal{Y}$ that ?")

The scalar product between $z$ and any vector of $w$ is zero. % NOTE(review): original read "between $z$ and (?)"
\begin{property}
  $\operatorname{proj}^w(y)$ is the closest vector to $y$ that belongs to $w$.
\end{property}
\begin{definition}[Matrix]
  A matrix is an application, that is, a function that transforms one vector into another; it is a linear function.
\end{definition}
\begin{example}[Matrix application]
  Let $A$ be a matrix:
  \[
    A = \begin{pmatrix}
      a & b \\
      c & d
    \end{pmatrix}
  \] and
  \[
    x = \begin{pmatrix}
      x_1 \\
      x_2
    \end{pmatrix}
  \]
  Then,
  \begin{align*}
    Ax & = \begin{pmatrix}
      a & b \\
      c & d
    \end{pmatrix}
    \begin{pmatrix}
      x_1 \\
      x_2
    \end{pmatrix} \\
       & = \begin{pmatrix}
      a x_1 + b x_2 \\
      c x_1 + d x_2
    \end{pmatrix}
  \end{align*}

  Similarly,
  \begin{align*}
    \begin{pmatrix}
      a & b & c & d \\
      e & f & g & h \\
      i & j & k & l
    \end{pmatrix}
    \begin{pmatrix}
      x_1 \\
      x_2 \\
      x_3 \\
      x_4
    \end{pmatrix}
    & = \begin{pmatrix}
      a x_1 + b x_2 + c x_3 + d x_4 \\
      e x_1 + f x_2 + g x_3 + h x_4 \\
      i x_1 + j x_2 + k x_3 + l x_4
    \end{pmatrix}
  \end{align*}
\end{example}
The number of columns of a matrix has to be the same as the dimension of the vector to which the matrix is applied.
\begin{definition}[Transpose of a Matrix]
  Let $A = \begin{pmatrix}
    a & b \\
    c & d
  \end{pmatrix}$, then $A^\T = \begin{pmatrix}
    a & c \\
    b & d
  \end{pmatrix}$
\end{definition}
\begin{figure}
  \centering
  \includestandalone{figures/schemes/coordinates_systems}
  \caption{Coordinate systems.}
  \label{fig:scheme-coordinate-systems}
\end{figure}